def add_data_to_store(self, store, name, data, workspace=None, overwrite=False, charset=None):
    """Upload shapefile data into an existing datastore.

    store: datastore name or store object (resolved via get_store when a name).
    name: base name used when bundling a dict of files into a zip.
    data: dict of files to bundle, or the path to an existing zip archive.
    workspace: workspace name/object; must match the store's workspace.
    overwrite: when True, ask the server to replace existing data.
    charset: optional character set of the source files.

    Raises UploadError unless the server answers 201 Created.  The upload
    bundle is removed afterwards in every case.
    """
    if isinstance(store, basestring):
        store = self.get_store(store, workspace=workspace)
    if workspace is not None:
        workspace = _name(workspace)
        assert store.workspace.name == workspace, \
            "Specified store (%s) is not in specified workspace (%s)!" % (store, workspace)
    else:
        workspace = store.workspace.name
    store = store.name

    if isinstance(data, dict):
        bundle = prepare_upload_bundle(name, data)
    else:
        bundle = data

    params = dict()
    if overwrite:
        params["update"] = "overwrite"
    if charset is not None:
        params["charset"] = charset

    # The bundle is a zip archive: open it in binary mode, and make sure the
    # handle is closed even when the request raises (the original leaked it).
    message = open(bundle, "rb")
    headers = {
        'Content-Type': 'application/zip',
        'Accept': 'application/xml'
    }
    upload_url = url(self.service_url,
                     ["workspaces", workspace, "datastores", store, "file.shp"],
                     params)
    try:
        headers, response = self.http.request(upload_url, "PUT", message, headers)
        self._cache.clear()
        if headers.status != 201:
            raise UploadError(response)
    finally:
        message.close()
        unlink(bundle)
def add_data_to_store(self, store, name, data, overwrite=False, charset=None):
    """Upload shapefile data into an existing datastore object.

    data may be a dict of files (bundled into a zip) or the path to a zip
    archive.  overwrite adds the server-side "overwrite" flag; charset is
    forwarded as a query parameter when given.

    Raises UploadError unless the server answers 201 Created; the bundle
    file is removed afterwards in every case.
    """
    if isinstance(data, dict):
        bundle = prepare_upload_bundle(name, data)
    else:
        bundle = data

    params = dict()
    if overwrite:
        params["overwrite"] = True
    if charset is not None:
        params["charset"] = charset
    if len(params):
        params = "?" + urlencode(params)
    else:
        params = ""

    # Zip payload: binary mode, and close the handle even on failure
    # (the original opened in text mode and never closed the file).
    message = open(bundle, "rb")
    headers = {
        'Content-Type': 'application/zip',
        'Accept': 'application/xml'
    }
    url = "%s/workspaces/%s/datastores/%s/file.shp%s" % (
        self.service_url, store.workspace.name, store.name, params)
    try:
        headers, response = self.http.request(url, "PUT", message, headers)
        self._cache.clear()
        if headers.status != 201:
            raise UploadError(response)
    finally:
        message.close()
        unlink(bundle)
def create_coveragestore(self, name, data, workspace=None, overwrite=False):
    """Create a coverage store from a GeoTIFF or world-image upload.

    data: dict of files (bundled; sent as a "worldimage" zip when a .tfw
    world file is present), a path to a GeoTIFF file, or an open
    file-like object whose contents are uploaded as-is.

    Raises ConflictingDataError when a store of the same name already
    exists and overwrite is False; UploadError unless the server answers
    201 Created.
    """
    if overwrite == False and self.get_store(name, workspace) is not None:
        fullname = "%s :: %s" % (workspace.name, name) if workspace is not None else name
        raise ConflictingDataError("There is already a store named %s" % fullname)
    if workspace is None:
        workspace = self.get_default_workspace()
    headers = {"Content-type": "image/tiff", "Accept": "application/xml"}

    # Temporary bundle path to delete afterwards; named `archive` to avoid
    # shadowing the builtin `zip` as the original did.
    archive = None
    ext = "geotiff"

    if isinstance(data, dict):
        archive = prepare_upload_bundle(name, data)
        # Binary read: the bundle is a zip archive, not text.
        with open(archive, "rb") as f:
            message = f.read()
        if "tfw" in data:
            headers["Content-type"] = "application/zip"
            ext = "worldimage"
    elif isinstance(data, basestring):
        # Binary read: GeoTIFF payloads must not go through text decoding.
        with open(data, "rb") as f:
            message = f.read()
    else:
        message = data.read()

    cs_url = "%s/workspaces/%s/coveragestores/%s/file.%s" % (
        self.service_url, workspace.name, name, ext)
    try:
        headers, response = self.http.request(safe_urlquote(cs_url), "PUT", message, headers)
        if headers.status != 201:
            raise UploadError(response)
    finally:
        if archive is not None:
            unlink(archive)
def add_data_to_store(self, store, name, data, workspace=None, overwrite=False, charset=None):
    """Upload shapefile data into an existing datastore.

    store may be a name (resolved via get_store) or a store object; data is
    a dict of files to bundle or the path to an existing zip archive.
    Raises UploadError unless the server answers 201 Created.
    """
    if isinstance(store, basestring):
        store = self.get_store(store, workspace=workspace)
    if workspace is None:
        workspace = store.workspace.name
    else:
        workspace = _name(workspace)
        assert store.workspace.name == workspace, \
            "Specified store (%s) is not in specified workspace (%s)!" % (store, workspace)
    store = store.name

    bundle = prepare_upload_bundle(name, data) if isinstance(data, dict) else data

    params = dict()
    if overwrite:
        params["update"] = "overwrite"
    if charset is not None:
        params["charset"] = charset

    headers = {
        'Content-Type': 'application/zip',
        'Accept': 'application/xml'
    }
    upload_url = url(self.service_url,
                     ["workspaces", workspace, "datastores", store, "file.shp"],
                     params)

    # Read the whole archive up front, then PUT it in one request.
    with open(bundle, "rb") as f:
        payload = f.read()
    headers, response = self.http.request(upload_url, "PUT", payload, headers)
    self._cache.clear()
    if headers.status != 201:
        raise UploadError(response)
def _create_coveragestore(
    self, name, data, workspace=None, overwrite=False, external=False
):
    """Create a coverage store, uploading the data or referencing it externally.

    data: dict of files (bundled; "worldimage" when a .tfw is present),
    a path string, or an open file-like object.  When external is True the
    payload is sent as text/plain to the "external." endpoint instead of
    being uploaded.

    Raises ConflictingDataError when the name is taken and overwrite is
    False; UploadError unless the server answers 201 Created.
    """
    if not overwrite:
        try:
            # Probe for an existing store; FailedRequestError means the name
            # is free.  (The returned store object itself is not needed, so
            # it is deliberately not bound.)
            self.get_store(name, workspace)
            msg = "There is already a store named " + name
            if workspace:
                msg += " in " + str(workspace)
            raise ConflictingDataError(msg)
        except FailedRequestError:
            # we don't really expect that every layer name will be taken
            pass
    if workspace is None:
        workspace = self.get_default_workspace()

    archive = None
    ext = "geotiff"
    content_type = "image/tiff" if not external else "text/plain"
    store_type = "file." if not external else "external."
    headers = {"Content-type": content_type, "Accept": "application/xml"}

    message = data
    if not external:
        if isinstance(data, dict):
            archive = prepare_upload_bundle(name, data)
            message = open(archive, "rb")
            if "tfw" in data:
                # If application/archive was used, server crashes with
                # a 500 error read in many sites that application/zip
                # will do the trick. Successfully tested
                headers["Content-type"] = "application/zip"
                ext = "worldimage"
        elif isinstance(data, str):
            message = open(data, "rb")
        else:
            message = data

    cs_url = urljoin(
        self.service_url,
        "workspaces/{}/coveragestores/{}/{}{}".format(
            workspace.name, name, store_type, ext
        ),
    )
    params = {"configure": "first", "coverageName": name}
    try:
        r = self.session.put(cs_url, data=message, headers=headers, params=params)
        self._cache.clear()
        if r.status_code != 201:
            raise UploadError(r.text)
    finally:
        if hasattr(message, "close"):
            message.close()
        if archive is not None:
            os.unlink(archive)
def _create_coveragestore(self, name, data, workspace=None, overwrite=False, external=False):
    """Create a coverage store, uploading the data or referencing it externally.

    data: dict of files (bundled; sent as "worldimage" when a .tfw is
    present), a path string, or an open file-like object.  When external
    is True the payload is sent as text/plain to the "external." endpoint.

    Raises ConflictingDataError when the name is taken and overwrite is
    False; UploadError unless the server answers 201 Created.
    """
    if not overwrite:
        try:
            store = self.get_store(name, workspace)
            msg = "There is already a store named " + name
            if workspace:
                msg += " in " + str(workspace)
            raise ConflictingDataError(msg)
        except FailedRequestError:
            # we don't really expect that every layer name will be taken
            pass
    if workspace is None:
        workspace = self.get_default_workspace()

    archive = None
    ext = "geotiff"
    # (renamed from the original misspelled local `contet_type`)
    content_type = "image/tiff" if not external else "text/plain"
    store_type = "file." if not external else "external."
    headers = {
        "Content-type": content_type,
        "Accept": "application/xml"
    }

    message = data
    if not external:
        if isinstance(data, dict):
            archive = prepare_upload_bundle(name, data)
            message = open(archive, 'rb')
            if "tfw" in data:
                # If application/archive was used, server crashes with a 500 error
                # read in many sites that application/zip will do the trick. Successfully tested
                headers['Content-type'] = 'application/zip'
                ext = "worldimage"
        elif isinstance(data, basestring):
            message = open(data, 'rb')
        else:
            message = data

    cs_url = url(self.service_url,
                 ["workspaces", workspace.name, "coveragestores", name, store_type + ext],
                 {"configure": "first", "coverageName": name})
    try:
        headers, response = self.http.request(cs_url, "PUT", message, headers)
        self._cache.clear()
        if headers.status != 201:
            raise UploadError(response)
    finally:
        if hasattr(message, "close"):
            message.close()
        if archive is not None:
            unlink(archive)
def add_data_to_store(self, store, name, data, workspace=None, overwrite=False, charset=None):
    """Upload shapefile data into an existing datastore.

    store may be a name (looked up via get_stores) or a store object; data
    is a dict of files to bundle or the path to an existing zip archive.
    Raises FailedRequestError unless the server answers 201 Created.
    """
    if isinstance(store, string_types):
        store = self.get_stores(names=store, workspaces=[workspace])[0]

    if workspace is None:
        workspace = store.workspace.name
    else:
        workspace = _name(workspace)
        assert store.workspace.name == workspace, "Specified store (%s) is not in specified workspace (%s)!" % (
            store, workspace)
    store = store.name

    bundle = prepare_upload_bundle(name, data) if isinstance(data, dict) else data

    params = dict()
    if overwrite:
        params["update"] = "overwrite"
    if charset is not None:
        params["charset"] = charset
    params["filename"] = "{}.zip".format(name)
    params["target"] = "shp"
    # params["configure"] = "all"

    headers = {
        'Content-Type': 'application/zip',
        'Accept': 'application/xml'
    }
    upload_url = build_url(
        self.service_url,
        ["workspaces", workspace, "datastores", store, "file.shp"],
        params)

    try:
        with open(bundle, "rb") as f:
            payload = f.read()
        resp = self.http_request(upload_url, method='put', data=payload, headers=headers)
        if resp.status_code != 201:
            raise FailedRequestError(
                'Failed to add data to store {} : {}, {}'.format(
                    store, resp.status_code, resp.text))
        self._cache.clear()
    finally:
        # os.unlink(bundle)
        pass
def add_data_to_store(self, store, name, data, workspace=None, overwrite=False, charset=None):
    """Upload shapefile data into an existing datastore via a multipart PUT.

    store may be a name (resolved via get_store) or a store object; data is
    a dict of files to bundle or the path to an existing zip archive.
    Raises UploadError unless the server answers 201 Created.
    """
    if isinstance(store, basestring):
        store = self.get_store(store, workspace=workspace)
    if workspace is None:
        workspace = store.workspace.name
    else:
        workspace = _name(workspace)
        assert store.workspace.name == workspace, \
            "Specified store (%s) is not in specified workspace (%s)!" % (store, workspace)
    store = store.name

    bundle = prepare_upload_bundle(name, data) if isinstance(data, dict) else data

    params = dict()
    if overwrite:
        params["update"] = "overwrite"
    if charset is not None:
        params["charset"] = charset
    params["filename"] = "{0}.zip".format(name)
    params["target"] = "shp"
    # params["configure"] = "all"

    headers = {
        'Content-Type': 'application/zip',
        'Accept': 'application/xml'
    }
    upload_url = url(
        self.service_url,
        ["workspaces", workspace, "datastores", store, "file.shp"])

    try:
        with open(bundle, "rb") as f:
            response = requests.put(upload_url,
                                    files={'file': f},
                                    headers=headers,
                                    params=params,
                                    auth=(self.username, self.password))
        self._cache.clear()
        if response.status_code != 201:
            raise UploadError('{0} - "{1}"'.format(
                response.status_code, response.text))
    finally:
        # os.unlink(bundle)
        pass
def create_featurestore(self, name, data, workspace=None, overwrite=False, charset=None):
    """
    Create a shapefile datastore from a shapefile.
    """
    if not overwrite:
        try:
            self.get_store(name, workspace)
            conflict = "There is already a store named " + name
            if workspace:
                conflict += " in " + str(workspace)
            raise ConflictingDataError(conflict)
        except FailedRequestError:
            # Name lookup failing means the name is free -- the normal case.
            pass

    if workspace is None:
        workspace = self.get_default_workspace()
    workspace = _name(workspace)

    params = {} if charset is None else {"charset": charset}

    # PUT /workspaces/<ws>/datastores/<ds>/file.shp
    ds_url = urljoin(
        self.service_url,
        "workspaces/{}/datastores/{}/file.shp".format(workspace, name),
    )
    headers = {
        "Content-type": "application/zip",
        "Accept": "application/xml"
    }

    if isinstance(data, dict):
        LOGGER.debug("Data is NOT a zipfile")
        archive = prepare_upload_bundle(name, data)
    else:
        LOGGER.debug("Data is a zipfile")
        archive = data

    payload = open(archive, "rb")
    try:
        r = self.session.put(ds_url, data=payload, headers=headers, params=params)
        self._cache.clear()
        if r.status_code != 201:
            raise UploadError(r.text)
    finally:
        payload.close()
        os.unlink(archive)
def add_data_to_store(self, store, name, data, workspace=None, overwrite=False, charset=None):
    """
    Add shapefile data to store.
    """
    if isinstance(store, str):
        store = self.get_store(store, workspace=workspace)
    if workspace is None:
        workspace = store.workspace.name
    else:
        workspace = _name(workspace)
        msg = "Specified store ({}) is not in specified workspace ({})!"
        msg = msg.format(store, workspace)
        assert store.workspace.name == workspace, msg
    store = store.name

    bundle = prepare_upload_bundle(name, data) if isinstance(data, dict) else data

    params = dict()
    if overwrite:
        params["update"] = "overwrite"
    if charset is not None:
        params["charset"] = charset

    headers = {
        "Content-Type": "application/zip",
        "Accept": "application/xml"
    }
    upload_url = urljoin(
        self.service_url,
        "workspaces/{}/datastores/{}/file.shp".format(workspace, store),
    )

    try:
        with open(bundle, "rb") as f:
            payload = f.read()
        r = self.session.put(upload_url, params=params, data=payload, headers=headers)
        self._cache.clear()
        if r.status_code != 201:
            raise UploadError(r.text)
    finally:
        os.unlink(bundle)
def create_coveragestore(self, name, data, workspace=None, overwrite=False):
    """Create a coverage store from a GeoTIFF or world-image upload.

    data: dict of files (bundled; sent as "worldimage" when a .tfw is
    present), a path string, or an open file-like object.

    Raises ConflictingDataError when the name is taken and overwrite is
    False; UploadError unless the server answers 201 Created.
    """
    if not overwrite:
        try:
            # Probe for an existing store; FailedRequestError means free.
            self.get_store(name, workspace)
            msg = "There is already a store named " + name
            if workspace:
                msg += " in " + str(workspace)
            raise ConflictingDataError(msg)
        except FailedRequestError:
            # we don't really expect that every layer name will be taken
            pass
    if workspace is None:
        workspace = self.get_default_workspace()
    headers = {"Content-type": "image/tiff", "Accept": "application/xml"}
    archive = None
    ext = "geotiff"
    if isinstance(data, dict):
        archive = prepare_upload_bundle(name, data)
        # Binary mode: the bundle is a zip archive and must not be decoded
        # as text (the original text-mode open corrupts it on Python 3).
        message = open(archive, 'rb')
        if "tfw" in data:
            # NOTE(review): other variants of this method report that
            # application/archive makes the server 500 and use
            # application/zip instead -- confirm before changing.
            headers['Content-type'] = 'application/archive'
            ext = "worldimage"
    elif isinstance(data, basestring):
        # Binary mode for GeoTIFF payloads as well.
        message = open(data, 'rb')
    else:
        message = data
    cs_url = url(self.service_url, [
        "workspaces", workspace.name, "coveragestores", name, "file." + ext
    ])
    try:
        headers, response = self.http.request(cs_url, "PUT", message, headers)
        self._cache.clear()
        if headers.status != 201:
            raise UploadError(response)
    finally:
        if hasattr(message, "close"):
            message.close()
        if archive is not None:
            unlink(archive)
def create_featurestore(self, name, data, workspace=None, overwrite=False, charset=None):
    """Create a shapefile datastore from a shapefile or a dict of files."""
    if not overwrite:
        try:
            store = self.get_store(name, workspace)
            conflict = "There is already a store named " + name
            if workspace:
                conflict += " in " + str(workspace)
            raise ConflictingDataError(conflict)
        except FailedRequestError:
            # Lookup failure means the name is free -- the expected case.
            pass

    if workspace is None:
        workspace = self.get_default_workspace()
    workspace = _name(workspace)

    params = {} if charset is None else {'charset': charset}

    # PUT /workspaces/<ws>/datastores/<ds>/file.shp
    ds_url = url(self.service_url,
                 ["workspaces", workspace, "datastores", name, "file.shp"],
                 params)
    headers = {
        "Content-type": "application/zip",
        "Accept": "application/xml"
    }

    if isinstance(data, dict):
        logger.debug('Data is NOT a zipfile')
        archive = prepare_upload_bundle(name, data)
    else:
        logger.debug('Data is a zipfile')
        archive = data

    payload = open(archive, 'rb')
    try:
        # response = self.requests.post(ds_url, files={archive: open(archive, 'rb')})
        headers, response = self.http.request(ds_url, "PUT", payload, headers)
        self._cache.clear()
        if headers.status != 201:
            raise UploadError(response)
    finally:
        payload.close()
        unlink(archive)
def create_featurestore(self, name, data, workspace=None, overwrite=False, charset=None):
    """Create a shapefile datastore from a shapefile or a dict of files."""
    if workspace is None:
        workspace = self.get_default_workspace()
    workspace = _name(workspace)

    if not overwrite:
        existing = self.get_stores(names=name, workspaces=[workspace])
        if len(existing) > 0:
            raise ConflictingDataError(
                "There is already a store named {} in workspace {}".format(
                    name, workspace))

    params = {} if charset is None else {'charset': charset}

    # PUT /workspaces/<ws>/datastores/<ds>/file.shp
    url = build_url(
        self.service_url,
        ["workspaces", workspace, "datastores", name, "file.shp"],
        params)
    headers = {
        "Content-type": "application/zip",
        "Accept": "application/xml"
    }

    if isinstance(data, dict):
        logger.debug('Data is NOT a zipfile')
        archive = prepare_upload_bundle(name, data)
    else:
        logger.debug('Data is a zipfile')
        archive = data

    payload = open(archive, 'rb')
    try:
        resp = self.http_request(url, method='put', data=payload, headers=headers)
        if resp.status_code != 201:
            raise FailedRequestError(
                'Failed to create FeatureStore {} : {}, {}'.format(
                    name, resp.status_code, resp.text))
        self._cache.clear()
    finally:
        payload.close()
        os.unlink(archive)
def create_featurestore(self, name, data, workspace=None, overwrite=False, charset=None):
    """
    Create a shapefile datastore from a shapefile.
    """
    if not overwrite:
        try:
            self.get_store(name, workspace)
        except FailedRequestError:
            # Name is free -- this is the normal path.
            pass
        else:
            conflict = "There is already a store named " + name
            if workspace:
                conflict += " in " + str(workspace)
            raise ConflictingDataError(conflict)

    if workspace is None:
        workspace = self.get_default_workspace()
    workspace = _name(workspace)

    params = dict()
    if charset is not None:
        params['charset'] = charset

    # PUT /workspaces/<ws>/datastores/<ds>/file.shp
    ds_url = urljoin(
        self.service_url,
        "workspaces/{}/datastores/{}/file.shp".format(workspace, name)
    )
    headers = {
        "Content-type": "application/zip",
        "Accept": "application/xml"
    }

    if isinstance(data, dict):
        LOGGER.debug('Data is NOT a zipfile')
        archive = prepare_upload_bundle(name, data)
    else:
        LOGGER.debug('Data is a zipfile')
        archive = data

    payload = open(archive, 'rb')
    try:
        r = self.session.put(ds_url, data=payload, headers=headers, params=params)
        self._cache.clear()
        if r.status_code != 201:
            raise UploadError(r.text)
    finally:
        payload.close()
        os.unlink(archive)
def create_featurestore(self, name, data, workspace=None, overwrite=False, charset=None):
    """Create a shapefile datastore from a shapefile or a dict of files."""
    if not overwrite:
        existing = self.get_store(name, workspace)
        if existing is not None:
            conflict = "There is already a store named " + name
            if workspace:
                conflict += " in " + str(workspace)
            raise ConflictingDataError(conflict)

    if workspace is None:
        workspace = self.get_default_workspace()
    workspace = _name(workspace)

    params = {} if charset is None else {'charset': charset}

    # PUT /workspaces/<ws>/datastores/<ds>/file.shp
    ds_url = url(self.service_url,
                 ["workspaces", workspace, "datastores", name, "file.shp"])
    headers = {
        "Content-type": "application/zip",
        "Accept": "application/xml"
    }

    if isinstance(data, dict):
        logger.debug('Data is NOT a zipfile')
        archive = prepare_upload_bundle(name, data)
    else:
        logger.debug('Data is a zipfile')
        archive = data

    payload = open(archive, 'rb')
    try:
        response = self.request(method='put', url=ds_url, headers=headers,
                                data=payload, params=params)
        self._cache.clear()
        if response.status_code != 201:
            raise UploadError('{0} - "{1}"'.format(response.status_code, response.text))
    finally:
        payload.close()
        os.unlink(archive)
def add_data_to_store(self, store, name, data, workspace=None, overwrite=False, charset=None):
    """Upload shapefile data into an existing datastore.

    store may be a name (looked up via get_stores) or a store object; data
    is a dict of files to bundle or the path to an existing zip archive.

    Raises FailedRequestError unless the server answers 201 Created.
    """
    if isinstance(store, basestring):
        store = self.get_stores(names=store, workspaces=workspace)[0]
    if workspace is not None:
        workspace = _name(workspace)
        assert store.workspace.name == workspace, "Specified store (%s) is not in specified workspace (%s)!" % (store, workspace)
    else:
        workspace = store.workspace.name
    store = store.name

    if isinstance(data, dict):
        bundle = prepare_upload_bundle(name, data)
    else:
        bundle = data

    params = dict()
    if overwrite:
        params["update"] = "overwrite"
    if charset is not None:
        params["charset"] = charset
    params["filename"] = "{}.zip".format(name)
    params["target"] = "shp"
    # params["configure"] = "all"

    headers = {'Content-Type': 'application/zip', 'Accept': 'application/xml'}
    upload_url = build_url(
        self.service_url,
        [
            "workspaces",
            workspace,
            "datastores",
            store,
            "file.shp"
        ],
        params
    )
    try:
        with open(bundle, "rb") as f:
            data = f.read()
            resp = self.http_request(upload_url, method='put', data=data, headers=headers)
            if resp.status_code != 201:
                # The original built this exception without raising it, so
                # failed uploads were silently ignored.
                raise FailedRequestError('Failed to add data to store {} : {}, {}'.format(store, resp.status_code, resp.text))
        self._cache.clear()
    finally:
        # os.unlink(bundle)
        pass
def create_featurestore(self, name, data, workspace=None, overwrite=False, charset=None):
    """Create a shapefile datastore from a shapefile or a dict of files.

    Raises ConflictingDataError when a store of the same name exists in the
    workspace and overwrite is False; FailedRequestError unless the server
    answers 201 Created.  The archive file is removed afterwards.
    """
    if workspace is None:
        workspace = self.get_default_workspace()
    workspace = _name(workspace)
    if not overwrite:
        stores = self.get_stores(names=name, workspaces=workspace)
        if len(stores) > 0:
            msg = "There is already a store named {} in workspace {}".format(name, workspace)
            raise ConflictingDataError(msg)

    params = dict()
    if charset is not None:
        params['charset'] = charset
    url = build_url(
        self.service_url,
        [
            "workspaces",
            workspace,
            "datastores",
            name,
            "file.shp"
        ],
        params
    )
    # PUT /workspaces/<ws>/datastores/<ds>/file.shp
    headers = {
        "Content-type": "application/zip",
        "Accept": "application/xml"
    }
    if isinstance(data, dict):
        logger.debug('Data is NOT a zipfile')
        archive = prepare_upload_bundle(name, data)
    else:
        logger.debug('Data is a zipfile')
        archive = data
    file_obj = open(archive, 'rb')
    try:
        resp = self.http_request(url, method='put', data=file_obj, headers=headers)
        if resp.status_code != 201:
            # The original built this exception without raising it, so
            # failed uploads were silently ignored.
            raise FailedRequestError('Failed to create FeatureStore {} : {}, {}'.format(name, resp.status_code, resp.text))
        self._cache.clear()
    finally:
        file_obj.close()
        os.unlink(archive)
def create_coveragestore(self, name, data, workspace=None, overwrite=False):
    """Create a coverage store from a GeoTIFF or world-image upload."""
    if not overwrite:
        try:
            store = self.get_store(name, workspace)
            conflict = "There is already a store named " + name
            if workspace:
                conflict += " in " + str(workspace)
            raise ConflictingDataError(conflict)
        except FailedRequestError:
            # Lookup failure means the name is free -- the expected case.
            pass

    if workspace is None:
        workspace = self.get_default_workspace()

    headers = {
        "Content-type": "image/tiff",
        "Accept": "application/xml"
    }
    archive = None
    ext = "geotiff"

    if isinstance(data, dict):
        archive = prepare_upload_bundle(name, data)
        payload = open(archive, 'rb')
        if "tfw" in data:
            headers['Content-type'] = 'application/zip'
            ext = "worldimage"
    elif isinstance(data, basestring):
        payload = open(data, 'rb')
    else:
        payload = data

    cs_url = url(self.service_url,
                 ["workspaces", workspace.name, "coveragestores", name, "file." + ext])
    try:
        headers, response = self.http.request(cs_url, "PUT", payload, headers)
        self._cache.clear()
        if headers.status != 201:
            raise UploadError(response)
    finally:
        if hasattr(payload, "close"):
            payload.close()
        if archive is not None:
            unlink(archive)
def create_featurestore(self, name, data, workspace=None, overwrite=False):
    """Create a shapefile datastore by bundling and uploading shapefile data.

    data: dict of files to bundle into a zip archive for upload.

    Raises ConflictingDataError when a store of the same name exists and
    overwrite is False; UploadError unless the server answers 201 Created.
    The temporary bundle is removed afterwards in every case.
    """
    if overwrite == False and self.get_store(name, workspace) is not None:
        fullname = "%s :: %s" % (workspace.name, name) if workspace is not None else name
        raise ConflictingDataError("There is already a store named %s" % fullname)
    if workspace is None:
        workspace = self.get_default_workspace()
    ds_url = "%s/workspaces/%s/datastores/%s/file.shp" % (self.service_url, workspace.name, name)
    # PUT /workspaces/<ws>/datastores/<ds>/file.shp
    headers = {"Content-type": "application/zip", "Accept": "application/xml"}
    # Named `archive` to avoid shadowing the builtin `zip` as the original did.
    archive = prepare_upload_bundle(name, data)
    # Binary read with an explicit close: the bundle is a zip archive (the
    # original opened it in text mode and leaked the handle).
    with open(archive, "rb") as f:
        message = f.read()
    try:
        headers, response = self.http.request(safe_urlquote(ds_url), "PUT", message, headers)
        if headers.status != 201:
            raise UploadError(response)
    finally:
        unlink(archive)
def add_data_to_store(self, store, name, data, workspace=None, overwrite=False, charset=None):
    """
    Add shapefile data to store.
    """
    if isinstance(store, str):
        store = self.get_store(store, workspace=workspace)
    if workspace is not None:
        workspace = _name(workspace)
        msg = "Specified store ({}) is not in specified workspace ({})!"
        msg = msg.format(store, workspace)
        assert store.workspace.name == workspace, msg
    else:
        workspace = store.workspace.name
    store = store.name

    bundle = prepare_upload_bundle(name, data) if isinstance(data, dict) else data

    params = dict()
    if overwrite:
        params["update"] = "overwrite"
    if charset is not None:
        params["charset"] = charset

    headers = {
        'Content-Type': 'application/zip',
        'Accept': 'application/xml'
    }
    upload_url = urljoin(
        self.service_url,
        "workspaces/{}/datastores/{}/file.shp".format(workspace, store)
    )

    try:
        with open(bundle, "rb") as f:
            payload = f.read()
        r = self.session.put(upload_url, params=params, data=payload, headers=headers)
        self._cache.clear()
        if r.status_code != 201:
            raise UploadError(r.text)
    finally:
        os.unlink(bundle)
def create_featurestore(self, name, data, workspace=None, overwrite=False, charset=None):
    """Create a shapefile datastore from a shapefile or a dict of files."""
    if not overwrite:
        try:
            store = self.get_store(name, workspace)
        except FailedRequestError:
            # we don't really expect that every layer name will be taken
            pass
        else:
            conflict = "There is already a store named " + name
            if workspace:
                conflict += " in " + str(workspace)
            raise ConflictingDataError(conflict)

    if workspace is None:
        workspace = self.get_default_workspace()
    workspace = _name(workspace)

    params = {} if charset is None else {'charset': charset}

    # PUT /workspaces/<ws>/datastores/<ds>/file.shp
    ds_url = url(self.service_url,
                 ["workspaces", workspace, "datastores", name, "file.shp"],
                 params)
    headers = {
        "Content-type": "application/zip",
        "Accept": "application/xml"
    }

    if isinstance(data, dict):
        logger.debug('Data is NOT a zipfile')
        archive = prepare_upload_bundle(name, data)
    else:
        logger.debug('Data is a zipfile')
        archive = data

    payload = open(archive, 'rb')
    try:
        # response = self.requests.post(ds_url, files={archive: open(archive, 'rb')})
        headers, response = self.http.request(ds_url, "PUT", payload, headers)
        self._cache.clear()
        if headers.status != 201:
            raise UploadError(response)
    finally:
        payload.close()
        unlink(archive)
def _create_coveragestore(self, name, data, workspace=None, overwrite=False, external=False):
    """Create a coverage store, uploading the data or referencing it externally.

    data: dict of files (bundled; sent as "worldimage" when a .tfw is
    present), a path string, or an open file-like object.  When external
    is True the payload is sent as text/plain to the "external." endpoint.

    Raises ConflictingDataError when the name is taken and overwrite is
    False; UploadError unless the server answers 201 Created.
    """
    if not overwrite:
        store = self.get_store(name, workspace)
        if store is not None:
            msg = "There is already a store named " + name
            if workspace:
                msg += " in " + str(workspace)
            raise ConflictingDataError(msg)
    if workspace is None:
        workspace = self.get_default_workspace()

    archive = None
    ext = "geotiff"
    # (renamed from the original misspelled local `contet_type`)
    content_type = "image/tiff" if not external else "text/plain"
    store_type = "file." if not external else "external."
    headers = {"Content-type": content_type, "Accept": "application/xml"}

    message = data
    if not external:
        if isinstance(data, dict):
            archive = prepare_upload_bundle(name, data)
            message = open(archive, 'rb')
            if "tfw" in data:
                # If application/archive was used, server crashes with a 500 error
                # read in many sites that application/zip will do the trick. Successfully tested
                headers['Content-type'] = 'application/zip'
                ext = "worldimage"
        elif isinstance(data, basestring):
            message = open(data, 'rb')
        else:
            message = data

    cs_url = url(self.service_url, [
        "workspaces",
        workspace.name,
        "coveragestores",
        name,
        store_type + ext
    ], {
        "configure": "first",
        "coverageName": name
    })
    try:
        response = self.request(method='put', url=cs_url, headers=headers, data=message)
        self._cache.clear()
        if response.status_code != 201:
            raise UploadError('{0} - "{1}"'.format(response.status_code, response.text))
    finally:
        if getattr(message, "close", None) is not None:
            message.close()
        if archive is not None:
            os.unlink(archive)
if workspace is None: workspace = self.get_default_workspace() if charset: ds_url = "%s/workspaces/%s/datastores/%s/file.shp?charset=%s" % (self.service_url, workspace.name, name, charset) else: ds_url = "%s/workspaces/%s/datastores/%s/file.shp" % (self.service_url, workspace.name, name) # PUT /workspaces/<ws>/datastores/<ds>/file.shp headers = { "Content-type": "application/zip", "Accept": "application/xml" } if isinstance(data,dict): logger.debug('Data is NOT a zipfile') archive = prepare_upload_bundle(name, data) else: logger.debug('Data is a zipfile') archive = data message = open(archive) try: headers, response = self.http.request(ds_url, "PUT", message, headers) self._cache.clear() if headers.status != 201: raise UploadError(response) finally: unlink(archive) def create_coveragestore(self, name, data, workspace=None, overwrite=False): if not overwrite: try: