def mosaic_coverage_schema(self, coverage, store, workspace):
    """Return the granule-index schema of a coverage in a coverage store.

    Issues
    GET /workspaces/<ws>/coveragestores/<store>/coverages/<coverage>/index.json
    and returns the decoded JSON document.

    :raises FailedRequestError: on a non-200 response
    """
    schema_url = url(
        self.service_url,
        [
            "workspaces",
            workspace,
            "coveragestores",
            store,
            "coverages",
            coverage,
            "index.json"
        ],
        dict()
    )
    request_headers = {
        "Content-type": "application/json",
        "Accept": "application/json"
    }
    response_headers, body = self.http.request(schema_url, "GET", None, request_headers)
    if response_headers.status != 200:
        raise FailedRequestError(body)
    self._cache.clear()
    return json.loads(body, object_hook=_decode_dict)
def href(self):
    """Creation URL for this layer group, scoped to its workspace when set.

    The group name travels in the query string; a workspace may be given
    either as an object (with a ``name`` attribute) or as a plain string.
    """
    segments = ['layergroups']
    if self.workspace is not None:
        ws = getattr(self.workspace, 'name', self.workspace)
        segments = ['workspaces', ws] + segments
    return url(self.catalog.service_url, segments, {'name': self.name})
def publish_featuretype(self, name, store, native_crs, srs=None, jdbc_virtual_table=None):
    """Publish a featuretype from data in an existing store.

    :param name: name of the new featuretype (also used as its title)
    :param store: datastore object that holds the data
    :param native_crs: native CRS of the data (required)
    :param srs: declared SRS; defaults to ``native_crs``
    :param jdbc_virtual_table: optional JDBC virtual table (SQL view) payload
    :raises ValueError: if ``native_crs`` is missing
    """
    # @todo native_srs doesn't seem to get detected, even when in the DB
    # metadata (at least for postgis in geometry_columns) and then there
    # will be a misconfigured layer
    if native_crs is None:
        raise ValueError("must specify native_crs")

    feature_type = FeatureType(self, store.workspace, store, name)
    # because name is the in FeatureType base class, work around that
    # and hack in these others that don't have xml properties
    feature_type.dirty['name'] = name
    feature_type.dirty['srs'] = srs or native_crs
    feature_type.dirty['nativeCRS'] = native_crs
    feature_type.enabled = True
    feature_type.title = name

    headers = {
        "Content-type": "application/xml",
        "Accept": "application/xml"
    }
    resource_url = store.resource_url
    if jdbc_virtual_table is not None:
        # Virtual tables go through the featuretypes.json endpoint instead.
        feature_type.metadata = {'JDBC_VIRTUAL_TABLE': jdbc_virtual_table}
        resource_url = url(
            self.service_url,
            ["workspaces", store.workspace.name, "datastores", store.name, "featuretypes.json"],
            dict()
        )

    headers, response = self.http.request(resource_url, "POST", feature_type.message(), headers)
    feature_type.fetch()
    return feature_type
def get_styles(self, workspace=None):
    """Return all Style objects, optionally restricted to one workspace."""
    if workspace is None:
        path = "styles.xml"
    else:
        path = "workspaces/{0}/styles.xml".format(_name(workspace))
    doc = self.get_xml(url(self.service_url, [path]))
    return [Style(self, node.find('name').text) for node in doc.findall("style")]
def get_layers(self, resource=None):
    """Return all layers, optionally only those backed by ``resource``.

    ``resource`` may be a resource object or a resource name (which is
    resolved via :meth:`get_resource` first).
    """
    if isinstance(resource, str):
        resource = self.get_resource(resource)
    doc = self.get_xml(url(self.service_url, ["layers.xml"]))
    layers = [Layer(self, node.find("name").text) for node in doc.findall("layer")]
    if resource is not None:
        layers = [lyr for lyr in layers if lyr.resource.href == resource.href]
    # TODO: Filter by style
    return layers
def get_layergroups(self, workspace=None):
    """Return all layer groups, optionally only those in ``workspace``."""
    wks_name = None
    segments = ['layergroups.xml']
    if workspace is not None:
        wks_name = _name(workspace)
        segments = ['workspaces', wks_name] + segments
    doc = self.get_xml(url(self.service_url, segments))
    return [LayerGroup(self, node.find("name").text, wks_name)
            for node in doc.findall("layerGroup")]
def get_layergroup(self, name=None, workspace=None):
    """Return the named layer group, or None if the request fails.

    :param name: layer group name
    :param workspace: optional workspace (object or name) to scope the lookup
    :return: a LayerGroup, or None when the server rejects the request
    """
    try:
        path_parts = ["layergroups", name + ".xml"]
        if workspace is not None:
            path_parts = ['workspaces', _name(workspace)] + path_parts
        group = self.get_xml(url(self.service_url, path_parts))
        # BUGFIX: an ElementTree Element is falsy when it has no children,
        # and bare truth-testing Elements is deprecated -- the presence
        # check must be "is not None", not a plain truth test.
        workspace_el = group.find("workspace")
        wks_name = workspace_el.find("name").text if workspace_el is not None else None
        return LayerGroup(self, group.find("name").text, wks_name)
    except FailedRequestError:
        return None
def _build_href(self, extension, create=False):
    """Build the REST URL for this style.

    When ``create`` is true the style name is carried in the query string
    (the creation endpoint); otherwise it forms the last path segment with
    the given file extension appended.
    """
    segments = ["styles"]
    query = {}
    if create:
        query['name'] = self.name
    else:
        segments.append(self.name + extension)
    if self.workspace is not None:
        ws = getattr(self.workspace, 'name', self.workspace)
        segments = ["workspaces", ws] + segments
    return url(self.catalog.service_url, segments, query)
def _create_coveragestore(self, name, data, workspace=None, overwrite=False, external=False):
    """Create a coverage store and upload (or reference) its raster data.

    ``data`` may be a dict of files (bundled into a zip), a path to a file,
    or an open file-like object.  With ``external=True`` the server is
    pointed at an existing path instead of receiving an upload.

    :raises ConflictingDataError: if the store exists and overwrite is False
    :raises UploadError: on a non-201 response
    """
    if not overwrite:
        existing = self.get_store(name, workspace)
        if existing is not None:
            conflict_msg = "There is already a store named " + name
            if workspace:
                conflict_msg += " in " + str(workspace)
            raise ConflictingDataError(conflict_msg)
    if workspace is None:
        workspace = self.get_default_workspace()

    archive = None
    ext = "geotiff"
    if external:
        content_type = "text/plain"
        store_type = "external."
    else:
        content_type = "image/tiff"
        store_type = "file."
    headers = {
        "Content-type": content_type,
        "Accept": "application/xml"
    }

    upload = data
    if not external:
        if isinstance(data, dict):
            archive = prepare_upload_bundle(name, data)
            upload = open(archive, 'rb')
            if "tfw" in data:
                # If application/archive was used, server crashes with a 500 error
                # read in many sites that application/zip will do the trick. Successfully tested
                headers['Content-type'] = 'application/zip'
                ext = "worldimage"
        elif isinstance(data, str):
            upload = open(data, 'rb')

    cs_url = url(
        self.service_url,
        ["workspaces", workspace.name, "coveragestores", name, store_type + ext],
        {"configure": "first", "coverageName": name}
    )
    try:
        headers, response = self.http.request(cs_url, "PUT", upload, headers)
        self._cache.clear()
        if headers.status != 201:
            raise UploadError(response)
    finally:
        if hasattr(upload, "close"):
            upload.close()
        if archive is not None:
            os.unlink(archive)
def add_granule(self, data, store, workspace=None):
    """Harvest/add a granule into an existing imagemosaic.

    :param data: path to a .zip to upload, or a path/``file:`` URL for a
        granule already on the server's filesystem
    :param store: coverage store object or store name
    :param workspace: workspace name; required when ``store`` is a name
    :raises ValueError: if the workspace cannot be determined
    :raises UploadError: if the server does not answer 202 Accepted
    """
    ext = os.path.splitext(data)[-1]
    if ext == ".zip":
        # Upload the archive itself.
        # (Local renamed from "type" so the builtin is not shadowed.)
        upload_type = "file.imagemosaic"
        upload_data = open(data, 'rb')
        headers = {
            "Content-type": "application/zip",
            "Accept": "application/xml"
        }
    else:
        # Reference data already on the server.
        upload_type = "external.imagemosaic"
        upload_data = data if data.startswith("file:") else "file:{data}".format(data=data)
        headers = {
            "Content-type": "text/plain",
            "Accept": "application/xml"
        }
    params = dict()
    workspace_name = workspace
    if isinstance(store, str):
        store_name = store
    else:
        store_name = store.name
        workspace_name = store.workspace.name
    if workspace_name is None:
        raise ValueError("Must specify workspace")
    cs_url = url(
        self.service_url,
        [
            "workspaces",
            workspace_name,
            "coveragestores",
            store_name,
            upload_type
        ],
        params
    )
    try:
        headers, response = self.http.request(cs_url, "POST", upload_data, headers)
        if headers.status != 202:
            raise UploadError(response)
    finally:
        if hasattr(upload_data, "close"):
            upload_data.close()
    self._cache.clear()
    return "Added granule"
def list_granules(self, coverage, store, workspace=None, filter=None, limit=None, offset=None):
    """List granules of an imagemosaic coverage as a decoded JSON document.

    ``filter`` (a CQL filter -- the parameter name shadows the builtin but
    is part of the public signature), ``limit`` and ``offset`` are passed
    through as query parameters when given.

    :raises ValueError: if the workspace cannot be determined
    :raises FailedRequestError: on a non-200 response
    """
    optional = (('filter', filter), ('limit', limit), ('offset', offset))
    params = {key: value for key, value in optional if value is not None}

    workspace_name = workspace
    if isinstance(store, str):
        store_name = store
    else:
        store_name = store.name
        workspace_name = store.workspace.name
    if workspace_name is None:
        raise ValueError("Must specify workspace")

    # GET /workspaces/<ws>/coveragestores/<name>/coverages/<coverage>/index/granules.json
    granules_url = url(
        self.service_url,
        [
            "workspaces",
            workspace_name,
            "coveragestores",
            store_name,
            "coverages",
            coverage,
            "index/granules.json"
        ],
        params
    )
    request_headers = {
        "Content-type": "application/json",
        "Accept": "application/json"
    }
    response_headers, body = self.http.request(granules_url, "GET", None, request_headers)
    if response_headers.status != 200:
        raise FailedRequestError(body)
    self._cache.clear()
    return json.loads(body, object_hook=_decode_dict)
def get_resources(self, name=None):
    """Return the coverages in this store, or a single one by name.

    With ``name`` given, returns that Coverage or None when absent;
    otherwise returns the list of all Coverage objects in the store.
    """
    res_url = url(self.catalog.service_url, [
        "workspaces",
        self.workspace.name,
        "coveragestores",
        self.name,
        "coverages.xml"
    ])
    doc = self.catalog.get_xml(res_url)

    def build(node):
        # One-line helper: turn an XML <coverage> node into a Coverage.
        return coverage_from_index(self.catalog, self.workspace, self, node)

    nodes = doc.findall("coverage")
    if name is not None:
        match = next((n for n in nodes if n.findtext("name") == name), None)
        return build(match) if match is not None else None
    return [build(n) for n in nodes]
def delete_granule(self, coverage, store, granule_id, workspace=None):
    """Delete a single granule of an existing imagemosaic.

    Issues DELETE
    /workspaces/<ws>/coveragestores/<store>/coverages/<coverage>/index/granules/<granule_id>.json

    :param store: coverage store object or store name
    :param workspace: workspace name; required when ``store`` is a name
    :raises ValueError: if the workspace cannot be determined
    :raises FailedRequestError: on a non-200 response
    """
    params = dict()
    workspace_name = workspace
    if isinstance(store, str):
        store_name = store
    else:
        store_name = store.name
        workspace_name = store.workspace.name
    if workspace_name is None:
        raise ValueError("Must specify workspace")
    cs_url = url(
        self.service_url,
        [
            "workspaces",
            workspace_name,
            "coveragestores",
            store_name,
            "coverages",
            coverage,
            "index/granules",
            # BUGFIX: ".json" must be appended to the id, not passed as its
            # own path segment -- segments are joined with "/", which would
            # otherwise produce ".../granules/<granule_id>/.json".
            granule_id + ".json"
        ],
        params
    )
    headers = {
        "Content-type": "application/json",
        "Accept": "application/json"
    }
    headers, response = self.http.request(cs_url, "DELETE", None, headers)
    if headers.status != 200:
        raise FailedRequestError(response)
    self._cache.clear()
    return "Deleted granule"
def add_data_to_store(self, store, name, data, workspace=None, overwrite=False, charset=None):
    """Upload shapefile data into an existing datastore via its file.shp endpoint.

    ``data`` is either a dict of files (bundled into a zip) or a path to an
    existing zip archive.

    :raises UploadError: on a non-201 response
    """
    if isinstance(store, str):
        store = self.get_store(store, workspace=workspace)
    if workspace is not None:
        workspace = _name(workspace)
        assert store.workspace.name == workspace, "Specified store (%s) is not in specified workspace (%s)!" % (
            store, workspace)
    else:
        workspace = store.workspace.name
    store = store.name

    bundle = prepare_upload_bundle(name, data) if isinstance(data, dict) else data

    params = dict()
    if overwrite:
        params["update"] = "overwrite"
    if charset is not None:
        params["charset"] = charset
    params["filename"] = "{}.zip".format(name)
    params["target"] = "shp"
    # params["configure"] = "all"

    headers = {'Content-Type': 'application/zip', 'Accept': 'application/xml'}
    upload_url = url(self.service_url,
                     ["workspaces", workspace, "datastores", store, "file.shp"],
                     params)

    try:
        with open(bundle, "rb") as payload:
            headers, response = self.http.request(upload_url, "PUT", payload.read(), headers)
        self._cache.clear()
        if headers.status != 201:
            raise UploadError(response)
    finally:
        # os.unlink(bundle)
        pass
def create_featurestore(self, name, data, workspace=None, overwrite=False, charset=None):
    """Create a new datastore by uploading shapefile data.

    :param name: name of the new datastore
    :param data: dict of files to bundle into a zip, or path to a zip archive
    :param workspace: target workspace (defaults to the catalog default)
    :param overwrite: when False, an existing store of the same name raises
    :param charset: optional charset query parameter for the upload
    :raises ConflictingDataError: if the store exists and overwrite is False
    :raises UploadError: on a non-201 response
    """
    if not overwrite:
        store = self.get_store(name, workspace)
        if store is not None:
            msg = "There is already a store named " + name
            if workspace:
                msg += " in " + str(workspace)
            raise ConflictingDataError(msg)
    if workspace is None:
        workspace = self.get_default_workspace()
    workspace = _name(workspace)
    params = dict()
    if charset is not None:
        params['charset'] = charset
    # PUT /workspaces/<ws>/datastores/<ds>/file.shp
    ds_url = url(self.service_url, ["workspaces", workspace, "datastores", name, "file.shp"], params)
    headers = {
        "Content-type": "application/zip",
        "Accept": "application/xml"
    }
    # BUGFIX: only delete the archive afterwards when we created it
    # ourselves from a dict; a caller-supplied zip must not be unlinked.
    temporary = isinstance(data, dict)
    if temporary:
        logger.debug('Data is NOT a zipfile')
        archive = prepare_upload_bundle(name, data)
    else:
        logger.debug('Data is a zipfile')
        archive = data
    message = open(archive, 'rb')
    try:
        headers, response = self.http.request(ds_url, "PUT", message, headers)
        self._cache.clear()
        if headers.status != 201:
            raise UploadError(response)
    finally:
        # Close before unlinking (required on Windows).
        message.close()
        if temporary:
            os.unlink(archive)
def resource_url(self):
    """REST URL listing the featuretypes of this datastore."""
    segments = ["workspaces", self.workspace.name,
                "datastores", self.name, "featuretypes.xml"]
    return url(self.catalog.service_url, segments)
def href(self):
    """REST URL of this datastore's XML representation."""
    segments = ["workspaces", self.workspace.name,
                "datastores", self.name + ".xml"]
    return url(self.catalog.service_url, segments)
def href(self):
    """Creation URL for this coverage store (its name rides in the query)."""
    segments = ["workspaces", self.workspace.name, "coveragestores"]
    return url(self.catalog.service_url, segments, dict(name=self.name))
def datastore_url(self):
    """REST URL listing the datastores of this workspace."""
    segments = ["workspaces", self.name, "datastores.xml"]
    return url(self.catalog.service_url, segments)
def href(self):
    """Creation URL for this datastore (its name rides in the query string)."""
    return url(self.catalog.service_url,
               ["workspaces", self.workspace.name, "datastores"],
               dict(name=self.name))
def href(self):
    """URL of this resource's XML, preferring an explicitly stored href."""
    # Falls through to the computed URL whenever the stored href is falsy,
    # mirroring the original `self._href or url(...)` short-circuit.
    if self._href:
        return self._href
    segments = [
        "workspaces",
        self.workspace.name,
        self.url_part_stores,
        self.store.name,
        self.url_part_types,
        self.name + ".xml"
    ]
    return url(self.catalog.service_url, segments)
def href(self):
    """URL of this layer group's XML, scoped to its workspace when set."""
    segments = ["layergroups", self.name + ".xml"]
    if self.workspace is not None:
        # Accept either a workspace object (with .name) or a plain string.
        ws = getattr(self.workspace, 'name', self.workspace)
        segments = ["workspaces", ws] + segments
    return url(self.catalog.service_url, segments)
def reset(self):
    """POST to the server's /reset endpoint and drop the local cache.

    Returns whatever the HTTP layer returns for the request.
    """
    endpoint = url(self.service_url, ['reset'])
    result = self.http.request(endpoint, "POST")
    self._cache.clear()
    return result
def create_imagemosaic(self, name, data, configure=None, workspace=None, overwrite=False, charset=None):
    """Create an imagemosaic coverage store.

    :param name: name of the new coverage store
    :param data: path to a .zip of granules (uploaded), an open binary file
        object, or a server-side dataset path (referenced externally)
    :param configure: when not None the store is created unconfigured
        (``configure=none`` is sent)
    :param workspace: target workspace (defaults to the catalog default)
    :param overwrite: when False, an existing store of the same name raises
    :param charset: optional charset query parameter
    :raises ConflictingDataError: if the store exists and overwrite is False
    :raises ValueError: when ``data`` is not a usable dataset reference
    :raises UploadError: on a non-201 response
    """
    if not overwrite:
        store = self.get_store(name, workspace)
        if store is not None:
            msg = "There is already a store named " + name
            if workspace:
                msg += " in " + str(workspace)
            raise ConflictingDataError(msg)
    if workspace is None:
        workspace = self.get_default_workspace()
    workspace = _name(workspace)
    params = dict()
    if charset is not None:
        params['charset'] = charset
    if configure is not None:
        # NOTE(review): the supplied configure value is discarded and "none"
        # is always sent -- confirm whether other modes should be honored.
        params['configure'] = "none"
    # BUGFIX: `file` is not a builtin in Python 3 (the original
    # `isinstance(data, file)` raises NameError); detect file-like objects
    # by the presence of read().  Checked first so an open file object is
    # never handed to os.path.splitext.
    is_filelike = hasattr(data, 'read')
    if is_filelike or os.path.splitext(data)[-1] == ".zip":
        store_type = "file.imagemosaic"
        content_type = "application/zip"
        if isinstance(data, str):
            upload_data = open(data, 'rb')
        elif is_filelike:
            # Adding this check only to pass tests. We should drop support for passing a file object
            upload_data = data
        else:
            raise ValueError("ImageMosaic Dataset or directory: {data} is incorrect".format(data=data))
    else:
        store_type = "external.imagemosaic"
        content_type = "text/plain"
        if isinstance(data, str):
            upload_data = data if data.startswith("file:") else "file:{data}".format(data=data)
        else:
            raise ValueError("ImageMosaic Dataset or directory: {data} is incorrect".format(data=data))
    # PUT /workspaces/<ws>/coveragestores/<name>/file.imagemosaic?configure=none
    cs_url = url(
        self.service_url,
        [
            "workspaces",
            workspace,
            "coveragestores",
            name,
            store_type
        ],
        params
    )
    req_headers = {
        "Content-type": content_type,
        "Accept": "application/xml"
    }
    try:
        resp_headers, response = self.http.request(cs_url, "PUT", upload_data, req_headers)
        self._cache.clear()
        if resp_headers.status != 201:
            raise UploadError(response)
    finally:
        if hasattr(upload_data, "close"):
            upload_data.close()
    return "Image Mosaic created"
def coveragestore_url(self):
    """REST URL listing the coverage stores of this workspace."""
    segments = ["workspaces", self.name, "coveragestores.xml"]
    return url(self.catalog.service_url, segments)