def add_data_to_store(self, store, name, data, workspace=None, overwrite=False, charset=None):
    """Upload a zipped shapefile bundle into an existing datastore.

    Args:
        store: a datastore object, or its name (resolved via ``get_store``).
        name: base name used when bundling a dict of files.
        data: either a dict of files to bundle (see ``prepare_upload_bundle``)
            or a path to an existing zip archive.
        workspace: optional workspace (object or name); must match the
            store's workspace when both are given.
        overwrite: if True, request ``update=overwrite`` on the server.
        charset: optional character set to declare for the upload.

    Raises:
        UploadError: if the server does not answer 201 Created.

    Note: the bundle file is always deleted afterwards — even when the
    caller passed in their own zip path. Presumably callers expect this;
    verify before passing a file you want to keep.
    """
    if isinstance(store, basestring):
        store = self.get_store(store, workspace=workspace)
    if workspace is not None:
        workspace = _name(workspace)
        assert store.workspace.name == workspace, \
            "Specified store (%s) is not in specified workspace (%s)!" % (store, workspace)
    else:
        workspace = store.workspace.name
    store = store.name

    if isinstance(data, dict):
        bundle = prepare_upload_bundle(name, data)
    else:
        bundle = data

    params = dict()
    if overwrite:
        params["update"] = "overwrite"
    if charset is not None:
        params["charset"] = charset

    headers = {
        'Content-Type': 'application/zip',
        'Accept': 'application/xml'
    }
    upload_url = url(self.service_url,
                     ["workspaces", workspace, "datastores", store, "file.shp"],
                     params)

    try:
        # Open the zip in binary mode (text mode corrupts it on Windows)
        # and make sure the handle is closed before the file is unlinked.
        message = open(bundle, "rb")
        try:
            headers, response = self.http.request(upload_url, "PUT", message, headers)
            self._cache.clear()
            if headers.status != 201:
                raise UploadError(response)
        finally:
            message.close()
    finally:
        unlink(bundle)
def create_coveragestore(self, name, data, workspace=None, overwrite=False):
    """Create a coverage store by uploading raster data to the server.

    Args:
        name: name of the new coverage store.
        data: a dict of files to bundle (world-image upload if it contains
            a "tfw" entry), a path to a GeoTIFF file, or an open file-like
            object whose contents are sent as-is.
        workspace: target workspace object; defaults to the catalog's
            default workspace.
        overwrite: if False, fail when a store with this name exists.

    Raises:
        ConflictingDataError: if the store already exists and overwrite is False.
        UploadError: if the server does not answer 201 Created.
    """
    if not overwrite:
        try:
            store = self.get_store(name, workspace)
            msg = "There is already a store named " + name
            if workspace:
                msg += " in " + str(workspace)
            raise ConflictingDataError(msg)
        except FailedRequestError:
            # we don't really expect that every layer name will be taken
            pass
    if workspace is None:
        workspace = self.get_default_workspace()
    headers = {
        "Content-type": "image/tiff",
        "Accept": "application/xml"
    }

    archive = None
    ext = "geotiff"
    opened_here = False  # track whether we own the file handle

    if isinstance(data, dict):
        archive = prepare_upload_bundle(name, data)
        # Binary mode: the payload is a zip/tiff, not text.
        message = open(archive, "rb")
        opened_here = True
        if "tfw" in data:
            headers['Content-type'] = 'application/archive'
            ext = "worldimage"
    elif isinstance(data, basestring):
        message = open(data, "rb")
        opened_here = True
    else:
        # Caller-supplied file object; the caller keeps ownership.
        message = data

    cs_url = url(self.service_url,
                 ["workspaces", workspace.name, "coveragestores", name, "file." + ext])
    try:
        headers, response = self.http.request(cs_url, "PUT", message, headers)
        self._cache.clear()
        if headers.status != 201:
            raise UploadError(response)
    finally:
        # Close only handles we opened, then remove any temporary bundle.
        if opened_here:
            message.close()
        if archive is not None:
            unlink(archive)
def create_coveragestore(self, name, data, workspace=None, overwrite=False):
    """Create a coverage store by uploading raster data to the server.

    Args:
        name: name of the new coverage store.
        data: a dict of files to bundle (world-image upload if it contains
            a 'tfw' entry), a path to a GeoTIFF file, or an open file-like
            object whose contents are sent as-is.
        workspace: target workspace object; defaults to the catalog's
            default workspace.
        overwrite: if False, fail when a store with this name exists.

    Raises:
        ConflictingDataError: if the store already exists and overwrite is False.
        UploadError: if the server does not answer 201 Created.
    """
    if not overwrite:
        try:
            store = self.get_store(name, workspace)
            msg = 'There is already a store named ' + name
            if workspace:
                msg += ' in ' + str(workspace)
            raise ConflictingDataError(msg)
        except FailedRequestError:
            # We don't really expect that every layer name will be taken
            pass
    if workspace is None:
        workspace = self.get_default_workspace()
    headers = {'Content-type': 'image/tiff', 'Accept': 'application/xml'}

    archive = None
    ext = 'geotiff'
    owns_handle = False  # True when this method opened the file itself

    if isinstance(data, dict):
        archive = prepare_upload_bundle(name, data)
        # Binary mode: the payload is a zip/tiff, not text.
        message = open(archive, 'rb')
        owns_handle = True
        if 'tfw' in data:
            headers['Content-type'] = 'application/archive'
            ext = 'worldimage'
    elif isinstance(data, basestring):
        message = open(data, 'rb')
        owns_handle = True
    else:
        # Caller-supplied file object; the caller keeps ownership.
        message = data

    cs_url = url(self.service_url,
                 ['workspaces', workspace.name, 'coveragestores', name, 'file.' + ext])
    try:
        (headers, response) = self.http.request(cs_url, 'PUT', message, headers)
        self._cache.clear()
        if headers.status != 201:
            raise UploadError(response)
    finally:
        # Close only handles we opened, then remove any temporary bundle.
        if owns_handle:
            message.close()
        if archive is not None:
            unlink(archive)
def create_featurestore(self, name, data, workspace=None, overwrite=False, charset=None):
    """Create a feature store by uploading a zipped shapefile bundle.

    Args:
        name: name of the new datastore.
        data: a dict of files to bundle (see ``prepare_upload_bundle``)
            or a path to an existing zip archive.
        workspace: target workspace (object or name); defaults to the
            catalog's default workspace.
        overwrite: if False, fail when a store with this name exists.
        charset: optional character set to declare for the upload.

    Raises:
        ConflictingDataError: if the store already exists and overwrite is False.
        UploadError: if the server does not answer 201 Created.

    Note: the archive is always deleted afterwards — even when the caller
    passed in their own zip path. Presumably callers expect this; verify
    before passing a file you want to keep.
    """
    if not overwrite:
        try:
            store = self.get_store(name, workspace)
            msg = 'There is already a store named ' + name
            if workspace:
                msg += ' in ' + str(workspace)
            raise ConflictingDataError(msg)
        except FailedRequestError:
            # We don't really expect that every layer name will be taken
            pass
    if workspace is None:
        workspace = self.get_default_workspace()
    workspace = _name(workspace)

    params = dict()
    if charset is not None:
        params['charset'] = charset
    # PUT /workspaces/<ws>/datastores/<ds>/file.shp
    ds_url = url(self.service_url,
                 ['workspaces', workspace, 'datastores', name, 'file.shp'],
                 params)
    headers = {'Content-type': 'application/zip', 'Accept': 'application/xml'}

    if isinstance(data, dict):
        logger.debug('Data is NOT a zipfile')
        archive = prepare_upload_bundle(name, data)
    else:
        logger.debug('Data is a zipfile')
        archive = data

    try:
        # Open the zip in binary mode (text mode corrupts it on Windows)
        # and make sure the handle is closed before the file is unlinked.
        message = open(archive, 'rb')
        try:
            (headers, response) = self.http.request(ds_url, 'PUT', message, headers)
            self._cache.clear()
            if headers.status != 201:
                raise UploadError(response)
        finally:
            message.close()
    finally:
        unlink(archive)
def add_data_to_store(self, store, name, data, workspace=None, overwrite=False, charset=None):
    """Upload a zipped shapefile bundle into an existing datastore.

    Args:
        store: a datastore object, or its name (resolved via ``get_store``).
        name: base name used when bundling a dict of files.
        data: either a dict of files to bundle (see ``prepare_upload_bundle``)
            or a path to an existing zip archive.
        workspace: optional workspace (object or name); must match the
            store's workspace when both are given.
        overwrite: if True, request ``update=overwrite`` on the server.
        charset: optional character set to declare for the upload.

    Raises:
        UploadError: if the server does not answer 201 Created.

    Note: the bundle file is always deleted afterwards — even when the
    caller passed in their own zip path. Presumably callers expect this;
    verify before passing a file you want to keep.
    """
    if isinstance(store, basestring):
        store = self.get_store(store, workspace=workspace)
    if workspace is not None:
        workspace = _name(workspace)
        assert store.workspace.name == workspace, \
            'Specified store (%s) is not in specified workspace (%s)!' \
            % (store, workspace)
    else:
        workspace = store.workspace.name
    store = store.name

    if isinstance(data, dict):
        bundle = prepare_upload_bundle(name, data)
    else:
        bundle = data

    params = dict()
    if overwrite:
        params['update'] = 'overwrite'
    if charset is not None:
        params['charset'] = charset

    headers = {'Content-Type': 'application/zip', 'Accept': 'application/xml'}
    upload_url = url(self.service_url,
                     ['workspaces', workspace, 'datastores', store, 'file.shp'],
                     params)

    try:
        # Open the zip in binary mode (text mode corrupts it on Windows)
        # and make sure the handle is closed before the file is unlinked.
        message = open(bundle, 'rb')
        try:
            (headers, response) = self.http.request(upload_url, 'PUT', message, headers)
            self._cache.clear()
            if headers.status != 201:
                raise UploadError(response)
        finally:
            message.close()
    finally:
        unlink(bundle)