def put(self, dataset, id):
    """Update a dataset feature

    Update dataset feature with ID from a GeoJSON Feature and return it
    as a GeoJSON Feature.

    Multipart variant: the feature JSON is in the 'feature' form field,
    attachments are uploaded as "file:<field>" file parts.
    """
    args = feature_multipart_parser.parse_args()
    try:
        feature = json.loads(args['feature'])
    except (TypeError, ValueError):
        # missing field (TypeError) or invalid JSON (ValueError); both
        # fall through to the 400 below
        feature = None
    if not isinstance(feature, dict):
        api.abort(400, "feature is not an object")

    # Validate attachments
    attachments = attachments_service_handler()
    for key in request.files:
        filedata = request.files[key]
        if not attachments.validate_attachment(dataset, filedata):
            api.abort(404, "Attachment validation failed: " + key)

    # Save attachments
    saved_attachments = {}
    for key in request.files:
        filedata = request.files[key]
        slug = attachments.save_attachment(dataset, filedata)
        if not slug:
            # saving failed: roll back attachments saved so far
            for slug in saved_attachments.values():
                attachments.remove_attachment(dataset, slug)
            api.abort(404, "Failed to save attachment: " + key)
        else:
            saved_attachments[key] = slug
            # NOTE: remove the exact "file:" prefix; the previous
            # key.lstrip("file:") stripped a *character set* and mangled
            # field names beginning with 'f', 'i', 'l', 'e' or ':'
            field = key[len("file:"):] if key.startswith("file:") else key
            feature["properties"][field] = "attachment://" + slug

    data_service = data_service_handler()
    prev = data_service.show(get_auth_user(), dataset, id, None)
    if prev:
        prev_feature = prev["feature"]
        # If a non-empty attachment field value is changed, delete the attachment
        for key in feature["properties"]:
            if key in prev_feature["properties"] \
                    and prev_feature["properties"][key] \
                    and str(prev_feature["properties"][key]).startswith(
                        "attachment://") \
                    and feature["properties"][key] != \
                        prev_feature["properties"][key]:
                # NOTE: slice off the exact "attachment://" scheme prefix;
                # lstrip would also eat leading slug characters from the set
                # "athcmen:/"
                attachments.remove_attachment(
                    dataset,
                    prev_feature["properties"][key][len("attachment://"):])

    result = data_service.update(get_auth_user(), dataset, id, feature)
    if 'error' not in result:
        return result['feature']
    else:
        # update failed: clean up attachments saved by this request
        for slug in saved_attachments.values():
            attachments.remove_attachment(dataset, slug)
        error_code = result.get('error_code') or 404
        error_details = result.get('error_details') or {}
        api.abort(error_code, result['error'], **error_details)
def get(self):
    """Return key/value lookup records for the requested tables.

    Query parameter 'tables' is a comma-separated list of entries of the
    form "<table>:<key_field>:<value_field>". Malformed entries are
    skipped. Returns {"keyvalues": {table: [{"key": ..., "value": ...}]}}.
    """
    args = get_relations_parser.parse_args()
    data_service = data_service_handler()
    keyvals = args['tables'] or ""
    ret = {}
    for keyval in keyvals.split(","):
        try:
            table, key_field_name, value_field_name = keyval.split(":")
        except ValueError:
            # NOTE: only tuple-unpacking of a malformed entry can fail
            # here; the previous bare `except:` also swallowed unrelated
            # errors (e.g. KeyboardInterrupt)
            continue
        ret[table] = []
        result = data_service.index(get_auth_user(), table, None, None, None)
        if 'feature_collection' in result:
            for feature in result['feature_collection']['features']:
                record = {
                    # "id" refers to the feature id, anything else to a
                    # property of the feature
                    "key": feature["id"] if key_field_name == "id"
                    else feature['properties'][key_field_name],
                    "value": feature['properties'][value_field_name].strip()
                }
                ret[table].append(record)
    return {"keyvalues": ret}
def put(self, dataset, id):
    """Update a dataset feature

    Update dataset feature with ID from a GeoJSON Feature and return it
    as a GeoJSON Feature, with configured internal audit fields stripped
    from the response.
    """
    cfg_handler = RuntimeConfig("data", app.logger)
    cfg = cfg_handler.tenant_config(tenant_handler.tenant())
    user_field = cfg.get("edit_user_field", None)
    timestamp_field = cfg.get("edit_timestamp_field", None)

    # guard clauses: reject non-JSON bodies and non-object payloads
    if not request.is_json:
        api.abort(400, "Request data is not JSON")
    # NOTE: api.payload parses the request body and catches invalid JSON
    feature = api.payload
    if not isinstance(feature, dict):
        api.abort(400, "JSON is not an object")

    data_service = data_service_handler()

    # stamp configured audit fields into the feature and register them as
    # internal so they can be stripped from the response again
    internal_fields = {}
    if user_field:
        feature["properties"][user_field] = get_auth_user()
        internal_fields[user_field] = {
            'name': user_field,
            'data_type': 'text'
        }
    if timestamp_field:
        feature["properties"][timestamp_field] = str(datetime.now())
        internal_fields[timestamp_field] = {
            'name': timestamp_field,
            'data_type': 'text'
        }

    result = data_service.update(
        get_auth_user(), dataset, id, feature, internal_fields)
    if 'error' in result:
        api.abort(
            result.get('error_code') or 404, result['error'],
            **(result.get('error_details') or {}))

    # remove internal audit fields before returning the feature
    result['feature']['properties'] = {
        key: value
        for key, value in result['feature']['properties'].items()
        if key not in internal_fields
    }
    return result['feature']
def post(self, program, path):
    """Forward a POST request to the resolved service link.

    Streams the upstream response back to the client; the request body is
    passed through unchanged.
    """
    link = self.__get_link(get_auth_user(), program, path, request.args)
    # Use .get() so a request without a Content-Type header does not
    # raise KeyError (-> 500); requests drops headers with a None value.
    headers = {'content-type': request.headers.get('content-type')}
    req = requests.post(link, stream=True, timeout=30,
                        data=request.get_data(), headers=headers)
    return self.__get_response(req)
def get(self, dataproduct_id):
    """ Metadata of dataproduct """
    # empty result means not found or not permitted for this user
    result = dataproduct_service.dataproduct(
        get_auth_user(), dataproduct_id)
    if not result:
        api.abort(404, "Dataproduct not found or permission error")
    return result
def delete(self, dataset, id):
    """Delete a dataset feature

    Delete dataset feature with ID.
    """
    service = data_service_handler()
    result = service.destroy(get_auth_user(), dataset, id)
    if 'error' in result:
        api.abort(result.get('error_code') or 404, result['error'])
    return {'message': "Dataset feature deleted"}
def attachments_diff(self, data_service, attachments, dataset, rel_table,
                     rel_record_id, feature, internal_fields,
                     upload_user_field_suffix, record_deleted=False):
    """Compute attachment values added/removed by an update or delete.

    Compares the submitted feature against the stored record and returns
    (newattachments, oldattachments): attachment URIs introduced by the
    change and URIs that will no longer be referenced. Also stamps the
    configured upload-user audit field for affected keys into *feature*
    and *internal_fields* (both mutated in place).

    :param record_deleted: treat every previous attachment as removed
    """
    newattachments = []
    oldattachments = []
    prev = data_service.show(get_auth_user(), rel_table, rel_record_id, None)
    if not prev:
        return (newattachments, oldattachments)
    prev_feature = prev["feature"]
    # If an attachment field value is changed, delete the attachment
    keys = list(feature["properties"].keys())
    for key in keys:
        # NOTE: key must exist in the previous properties before indexing;
        # previously `record_deleted` short-circuited that membership test
        # and a key absent from the stored record raised KeyError below
        if key in prev_feature["properties"] and (
                record_deleted
                or feature["properties"][key]
                != prev_feature["properties"][key]):
            if str(prev_feature["properties"][key]).startswith(
                    "attachment://"):
                oldattachments.append(prev_feature["properties"][key])
                if upload_user_field_suffix:
                    # record which user changed this attachment field
                    upload_user_field = key + "__" + upload_user_field_suffix
                    feature["properties"][
                        upload_user_field] = get_auth_user()
                    internal_fields[upload_user_field] = {
                        'name': upload_user_field,
                        'data_type': 'text'
                    }
            if str(feature["properties"][key]).startswith("attachment://"):
                newattachments.append(feature["properties"][key])
    return (newattachments, oldattachments)
def get(self, service_name):
    """OGC service request

    GET request for an OGC service (WMS, WFS).
    """
    service = ogc_service_handler()
    response = service.get(
        get_auth_user(), service_name, request.host,
        request.args, request.script_root)
    # optional download filename requested by the client
    download_name = request.values.get('filename')
    if download_name:
        response.headers['content-disposition'] = (
            'attachment; filename=' + download_name)
    return response
def get(self, dataset):
    """Get dataset geometries

    Return dataset geometries with where clause filters. The matching
    features are returned as GeoJSON FeatureCollection.
    """
    handler = search_geom_handler()
    result = handler.query(
        get_auth_user(), dataset, request.args.get('filter'))
    if 'error' in result:
        api.abort(result.get('error_code') or 404, result['error'])
    return result['feature_collection']
def attachments_diff(self, data_service, attachments, dataset, rel_table,
                     rel_record_id, feature):
    """Compute attachment values added/removed by updating a record.

    Compares the submitted feature against the stored record and returns
    (newattachments, oldattachments): attachment URIs introduced by the
    change and previously stored URIs that the change replaces.
    """
    added = []
    removed = []
    prev = data_service.show(get_auth_user(), rel_table, rel_record_id, None)
    if not prev:
        return (added, removed)
    prev_props = prev["feature"]["properties"]
    # If an attachment field value is changed, delete the attachment
    for key, value in feature["properties"].items():
        if key not in prev_props or value == prev_props[key]:
            continue
        if str(prev_props[key]).startswith("attachment://"):
            removed.append(prev_props[key])
        if str(value).startswith("attachment://"):
            added.append(value)
    return (added, removed)
def get(self):
    """ List of selected dataproducts with web display information

    Retrieves the layers with names exactly matching those in the
    specified list, in that order.
    """
    layers = request.args.get('filter', "")
    return_layers = {}
    for layer in layers.split(","):
        results = weblayers_service.weblayers(get_auth_user(), layer)
        # NOTE: the previous try/except around this assignment was dead
        # code — a plain dict assignment cannot raise
        return_layers[layer] = [results]
    return return_layers
def post(self, dataset):
    """Create a new dataset feature

    Create new dataset feature from a GeoJSON Feature and return it as a
    GeoJSON Feature.

    Multipart variant: the feature JSON is in the 'feature' form field,
    attachments are uploaded as "file:<field>" file parts.
    """
    args = feature_multipart_parser.parse_args()
    try:
        feature = json.loads(args['feature'])
    except (TypeError, ValueError):
        # missing field (TypeError) or invalid JSON (ValueError); both
        # fall through to the 400 below
        feature = None
    if not isinstance(feature, dict):
        api.abort(400, "feature is not an object")

    # Validate attachments
    attachments = attachments_service_handler()
    for key in request.files:
        filedata = request.files[key]
        if not attachments.validate_attachment(dataset, filedata):
            api.abort(404, "Attachment validation failed: " + key)

    # Save attachments
    saved_attachments = {}
    for key in request.files:
        filedata = request.files[key]
        slug = attachments.save_attachment(dataset, filedata)
        if not slug:
            # saving failed: roll back attachments saved so far
            for slug in saved_attachments.values():
                attachments.remove_attachment(dataset, slug)
            api.abort(404, "Failed to save attachment: " + key)
        else:
            saved_attachments[key] = slug
            # NOTE: remove the exact "file:" prefix; the previous
            # key.lstrip("file:") stripped a *character set* and mangled
            # field names beginning with 'f', 'i', 'l', 'e' or ':'
            field = key[len("file:"):] if key.startswith("file:") else key
            feature["properties"][field] = "attachment://" + slug

    data_service = data_service_handler()
    result = data_service.create(get_auth_user(), dataset, feature)
    if 'error' not in result:
        return result['feature'], 201
    else:
        # creation failed: clean up attachments saved by this request
        for slug in saved_attachments.values():
            attachments.remove_attachment(dataset, slug)
        error_code = result.get('error_code') or 404
        error_details = result.get('error_details') or {}
        api.abort(error_code, result['error'], **error_details)
def post(self, service_name):
    """OGC service request

    POST request for an OGC service (WMS, WFS).
    """
    # NOTE: request.values combines query-string and form parameters
    service = ogc_service_handler()
    response = service.post(
        get_auth_user(), service_name, request.host_url,
        request.values, request.script_root, request.origin)
    # optional download filename requested by the client
    download_name = request.values.get('filename')
    if download_name:
        response.headers['content-disposition'] = (
            'attachment; filename=' + download_name)
    return response
def get(self, service_name):
    """Get legend graphic

    Return legend graphic for specified layer
    """
    args = legend_parser.parse_args()
    layer_param = args['layer'] or ''
    format_param = args['format'] or 'image/png'
    # avoid shadowing the `type` builtin
    legend_type = (args['type'] or 'default').lower()
    # optional legend rendering parameters passed through to the service
    param_keys = (
        "bbox", "crs", "scale", "width", "height", "dpi", "boxspace",
        "layerspace", "layertitlespace", "symbolspace", "iconlabelspace",
        "symbolwidth", "symbolheight", "layerfontfamily", "itemfontfamily",
        "layerfontbold", "itemfontbold", "layerfontsize", "itemfontsize",
        "layerfontitalic", "itemfontitalic", "layerfontcolor",
        "itemfontcolor", "layertitle", "transparent", "rulelabel"
    )
    # keep only parameters with a non-empty value
    params = {key: args[key] for key in param_keys if args[key]}
    legend_service = legend_service_handler()
    return legend_service.get_legend(
        service_name, layer_param, format_param, params, legend_type,
        get_auth_user()
    )
def get(self, dataset, id):
    """Get a dataset feature

    Return dataset feature with ID as a GeoJSON Feature.

    Query parameter: <b>crs</b>: Client CRS, e.g. <b>EPSG:3857<b>
    """
    crs = show_parser.parse_args()['crs']
    service = data_service_handler()
    result = service.show(get_auth_user(), dataset, id, crs)
    if 'error' in result:
        api.abort(404, result['error'])
    return result['feature']
def get(self, dataset):
    """Get dataset features

    Return dataset features inside bounding box and matching filter as a
    GeoJSON FeatureCollection.
    """
    args = index_parser.parse_args()
    service = data_service_handler()
    result = service.index(
        get_auth_user(), dataset, args['bbox'], args['crs'],
        args['filter'])
    if 'error' in result:
        api.abort(result.get('error_code') or 404, result['error'])
    return result['feature_collection']
def get(self, dataset, id):
    """Return related records for the feature with the given id.

    Query parameter 'tables' is a comma-separated list of entries of the
    form "<table>:<fk_field>". Malformed entries are skipped. For each
    relation table, records whose foreign key matches *id* are returned
    with their property keys prefixed by "<table>__", sorted by id.
    """
    data_service = data_service_handler()
    args = get_relations_parser.parse_args()
    relations = args['tables'] or ""
    ret = {}
    for relation in relations.split(","):
        try:
            table, fk_field_name = relation.split(":")
        except ValueError:
            # NOTE: only tuple-unpacking of a malformed entry can fail
            # here; the previous bare `except:` also swallowed unrelated
            # errors
            continue
        ret[table] = {"fk": fk_field_name, "records": []}
        result = data_service.index(
            get_auth_user(), table, None, None,
            '[["%s", "=", %d]]' % (fk_field_name, id)
        )
        if 'feature_collection' in result:
            for feature in result['feature_collection']['features']:
                # prefix property keys with the table name
                record = {(table + "__" + k): v
                          for k, v in feature['properties'].items()}
                record["id"] = feature["id"]
                ret[table]['records'].append(record)
            ret[table]['records'].sort(key=lambda r: r["id"])
    return {"relationvalues": ret}
def get_document(tenant, template, format):
    """Return report with specified template and format.

    Looks up the template in the tenant's 'document_templates' resources,
    checks permissions for the current user, forwards the request (with
    all original query parameters) to the configured Jasper reports
    service and streams the generated document back to the client.
    Responds 404 if the template is unknown or not permitted.

    :param str template: Template ID
    :param str format: Document format
    """
    # NOTE: parameter `format` shadows the builtin; kept for caller
    # compatibility
    config = config_handler.tenant_config(tenant)
    jasper_service_url = config.get('jasper_service_url',
                                    'http://localhost:8002/reports')
    jasper_timeout = config.get("jasper_timeout", 60)
    resources = config.resources().get('document_templates', [])

    permissions_handler = PermissionsReader(tenant, app.logger)
    permitted_resources = permissions_handler.resource_permissions(
        'document_templates', get_auth_user())
    if template in permitted_resources:
        # exactly one configured resource must match the template id
        resource = list(
            filter(lambda entry: entry.get("template") == template,
                   resources))
        if len(resource) != 1:
            app.logger.info("Template '%s' not found in config", template)
            abort(404)
        jasper_template = resource[0]['report_filename']
        # http://localhost:8002/reports/BelasteteStandorte/?format=pdf&p1=v1&..
        url = "%s/%s/" % (jasper_service_url, jasper_template)
        params = {"format": format}
        # forward all client query parameters (multi-values as lists)
        for k, v in request.args.lists():
            params[k] = v
        app.logger.info("Forward request to %s?%s" % (url, urlencode(params)))
        response = requests.get(url, params=params, timeout=jasper_timeout)
        # stream the report back without buffering it in memory
        r = Response(stream_with_context(
            response.iter_content(chunk_size=16 * 1024)),
            content_type=response.headers['content-type'],
            status=response.status_code)
        return r
    else:
        app.logger.info("Missing permissions for template '%s'", template)
        abort(404)
def get(self):
    """Search for searchtext and return the results
    """
    searchtext = request.args.get('searchtext', None)
    filter_param = request.args.get('filter', "")
    limit = request.args.get('limit', None)
    # normalize limit: positive int, otherwise None
    # (a falsy raw value — missing or empty — is passed through unchanged)
    if limit:
        try:
            value = int(limit)
            limit = value if value > 0 else None
        except ValueError:
            limit = None

    # split filter, trim whitespace and drop empty entries
    filters = [part.strip() for part in filter_param.split(',')]
    filters = [part for part in filters if part]

    handler = search_handler()
    return handler.search(get_auth_user(), searchtext, filters, limit)
def post(self, dataset):
    """Create a new dataset feature

    Create new dataset feature from a GeoJSON Feature and return it as a
    GeoJSON Feature.
    """
    # guard clauses: reject non-JSON bodies and non-object payloads
    if not request.is_json:
        api.abort(400, "Request data is not JSON")
    # NOTE: api.payload parses the request body and catches invalid JSON
    payload = api.payload
    if not isinstance(payload, dict):
        api.abort(400, "JSON is not an object")

    service = data_service_handler()
    result = service.create(get_auth_user(), dataset, payload)
    if 'error' in result:
        api.abort(
            result.get('error_code') or 404, result['error'],
            **(result.get('error_details') or {}))
    return result['feature'], 201
def post(self, dataset, id):
    """Update relation values for the specified dataset

    Return success status for each relation value.

    The 'values' form field holds a JSON object mapping relation table
    names to {"fk": <fk field>, "records": [...]}; each record carries
    its table-prefixed properties plus an optional "__status__" of
    "new", "changed" or "deleted...". Uploaded files are form parts named
    "file:<table>__<field>__<record index>".
    """
    args = post_relations_parser.parse_args()
    try:
        payload = json.loads(args['values'])
    except:
        # NOTE(review): bare except — also swallows TypeError when the
        # 'values' field is missing; consider narrowing
        payload = None
    if not isinstance(payload, dict):
        api.abort(400, "JSON is not an object")

    data_service = data_service_handler()

    # tenant-specific config: optional audit fields stamped on edits and
    # attachment uploads
    config_handler = RuntimeConfig("data", app.logger)
    config = config_handler.tenant_config(tenant_handler.tenant())
    upload_user_field_suffix = config.get("upload_user_field_suffix", None)
    edit_user_field = config.get("edit_user_field", None)
    edit_timestamp_field = config.get("edit_timestamp_field", None)

    # Check if dataset with specified id exists
    if not data_service.is_editable(get_auth_user(), dataset, id):
        api.abort(404, "Dataset or feature not found or permission error")

    # Validate attachments
    attachments = attachments_service_handler()
    for key in request.files:
        filedata = request.files[key]
        attachment_valid, message = attachments.validate_attachment(
            dataset, filedata)
        if not attachment_valid:
            api.abort(
                404,
                "Attachment validation failed for " + key + ": " + message)

    # Save attachments
    saved_attachments = {}
    internal_fields = []
    for key in request.files:
        filedata = request.files[key]
        slug = attachments.save_attachment(dataset, filedata)
        if not slug:
            # saving failed: roll back all attachments saved so far
            for slug in saved_attachments.values():
                attachments.remove_attachment(dataset, slug)
            api.abort(404, "Failed to save attachment: " + key)
        else:
            saved_attachments[key] = slug
            # NOTE(review): lstrip("file:") strips a character set, not
            # the literal prefix — keys whose payload part starts with
            # 'f','i','l','e' or ':' would be mangled
            parts = key.lstrip("file:").split("__")
            table = parts[0]
            field = parts[1]
            index = parts[2]
            # write the attachment URI into the matching relation record
            payload[table]["records"][int(index)][
                table + "__" + field] = "attachment://" + slug
            if upload_user_field_suffix:
                # record which user uploaded this attachment
                upload_user_field = table + "__" + field + "__" + \
                    upload_user_field_suffix
                payload[table]["records"][int(
                    index)][upload_user_field] = get_auth_user()
                internal_fields.append(upload_user_field)

    ret = {}
    haserrors = False
    for (rel_table, rel_data) in payload.items():
        fk_field = rel_data.get("fk", None)
        ret[rel_table] = {"fk": fk_field, "records": []}
        tbl_prefix = rel_table + "__"
        for rel_record in rel_data.get("records", []):
            # Set foreign key for new records
            if rel_record.get("__status__", "") == "new":
                rel_record[tbl_prefix + fk_field] = id
            if rel_record.get(tbl_prefix + fk_field, None) != id:
                # record does not belong to this feature
                rel_record["__error__"] = "FK validation failed"
                ret[rel_table]["records"].append(rel_record)
                haserrors = True
            else:
                # build a GeoJSON-style feature from the prefixed keys
                entry = {
                    "type": "Feature",
                    "id": rel_record["id"] if "id" in rel_record else None,
                    "properties": {
                        k[len(tbl_prefix):]: v
                        for k, v in rel_record.items()
                        if k.startswith(tbl_prefix)
                    }
                }
                # internal (audit) fields for this table, without prefix
                table_internal_fields = {
                    n[len(tbl_prefix):]: {
                        'name': n[len(tbl_prefix):],
                        'data_type': 'text'
                    } for n in internal_fields if n.startswith(tbl_prefix)
                }
                if edit_user_field:
                    entry["properties"][edit_user_field] = get_auth_user()
                    table_internal_fields[edit_user_field] = {
                        'name': edit_user_field,
                        'data_type': 'text'
                    }
                if edit_timestamp_field:
                    entry["properties"][edit_timestamp_field] = str(
                        datetime.now())
                    table_internal_fields[edit_timestamp_field] = {
                        'name': edit_timestamp_field,
                        'data_type': 'text'
                    }
                if not "__status__" in rel_record:
                    # unchanged record: pass through untouched
                    ret[rel_table]["records"].append(rel_record)
                    continue
                elif rel_record["__status__"] == "new":
                    result = data_service.create(
                        get_auth_user(), rel_table, entry,
                        table_internal_fields)
                elif rel_record["__status__"] == "changed":
                    # detect attachments added/removed by this update
                    (newattachments, oldattachments) = self.attachments_diff(
                        data_service, attachments, dataset, rel_table,
                        rel_record["id"], entry, table_internal_fields,
                        upload_user_field_suffix)
                    result = data_service.update(
                        get_auth_user(), rel_table, rel_record["id"], entry,
                        table_internal_fields)
                    # on error drop the new attachments, on success the old
                    self.cleanup_attachments(
                        attachments, dataset,
                        newattachments if "error" in result
                        else oldattachments)
                elif rel_record["__status__"].startswith("deleted"):
                    (newattachments, oldattachments) = self.attachments_diff(
                        data_service, attachments, dataset, rel_table,
                        rel_record["id"], entry, table_internal_fields,
                        upload_user_field_suffix, True)
                    if upload_user_field_suffix:
                        # persist the upload-user audit fields before the
                        # record is destroyed
                        data_service.update(
                            get_auth_user(), rel_table, rel_record["id"],
                            entry, table_internal_fields)
                    result = data_service.destroy(
                        get_auth_user(), rel_table, rel_record["id"])
                    self.cleanup_attachments(
                        attachments, dataset,
                        newattachments if "error" in result
                        else oldattachments)
                else:
                    # unknown status: skip record
                    continue
                if "error" in result:
                    rel_record["error"] = result["error"]
                    rel_record["error_details"] = result.get(
                        'error_details') or {}
                    ret[rel_table]["records"].append(rel_record)
                    haserrors = True
                elif "feature" in result:
                    # re-prefix returned properties, dropping audit fields
                    rel_record = {(rel_table + "__" + k): v
                                  for k, v in result['feature']
                                  ['properties'].items()
                                  if not k in table_internal_fields}
                    rel_record["id"] = result['feature']["id"]
                    ret[rel_table]["records"].append(rel_record)
    return {"relationvalues": ret, "success": not haserrors}
def post(self, mapid):
    """Submit query

    Return map print

    Builds a QGIS Server GetPrint request from the posted form
    parameters, optionally enriches it with values from configured label
    queries, forwards it to the OGC service and streams the result back.
    """
    tenant = tenant_handler.tenant()
    config = config_handler.tenant_config(tenant)
    identity = get_auth_user()
    ogc_service_url = config.get('ogc_service_url', 'http://localhost:5013/')
    print_pdf_filename = config.get('print_pdf_filename')
    qgs_postfix = config.get('qgs_postfix', '')
    qgis_server_version = config.get('qgis_server_version', '2.18.19')
    label_queries_config = config.get('label_queries', [])
    # TODO: read resources

    post_params = dict(request.form.items())
    app.logger.info("POST params: %s" % post_params)

    # CONTENT_DISPOSITION controls the response download behavior and is
    # not forwarded to the OGC service
    content_disposition = post_params.get('CONTENT_DISPOSITION',
                                          'attachment')
    if 'CONTENT_DISPOSITION' in post_params:
        del post_params['CONTENT_DISPOSITION']

    params = {"SERVICE": "WMS", "VERSION": "1.3.0", "REQUEST": "GetPrint"}
    params.update(post_params)
    # normalize parameter keys to upper case
    params = {k.upper(): v for k, v in params.items()}

    # Search layers parameter
    layerparam = None
    for key, value in params.items():
        if key.endswith(":LAYERS"):
            layerparam = key
            break
    if not layerparam:
        abort(400, "Missing <mapName>:LAYERS parameter")

    # NOTE(review): template/layers/opacities/colors are parsed but not
    # used below — presumably consumed in code outside this view; verify
    template = params.get('TEMPLATE')
    layers = params.get(layerparam, '').split(',')
    opacities = params.get('OPACITIES', [])
    if opacities:
        opacities = opacities.split(',')
    colors = params.get('COLORS', '').split(',')

    # extract any external WMS and WFS layers
    external_ows_layers = ExternalOwsLayers(qgis_server_version, app.logger)
    external_ows_layers.update_params(params, layerparam)

    # add fields from custom label queries
    for label_config in label_queries_config:
        conn = psycopg2.connect(label_config["db_url"])
        # substitute the current username into the configured query
        sql = label_config["query"].replace(
            "$username$", "'%s'" % (identity or ""))
        cursor = conn.cursor()
        cursor.execute(sql)
        row = cursor.fetchone()
        cursor.close()
        if row:
            # map result columns onto the configured parameter names
            for idx, param in enumerate(label_config['params']):
                params[param] = row[idx]
        conn.close()

    # forward to OGC service
    headers = {}
    if identity:
        # add authorization headers for forwarding identity
        app.logger.debug(
            "Adding authorization headers for identity '%s'" % identity)
        access_token = create_access_token(identity)
        headers['Authorization'] = "Bearer " + access_token
    if tenant_handler.tenant_header:
        # propagate the tenant header to the downstream service
        headers[tenant_handler.tenant_header] = request.headers.get(
            tenant_handler.tenant_header)
    url = ogc_service_url.rstrip("/") + "/" + mapid + qgs_postfix
    req = requests.post(url, timeout=120, data=params, headers=headers)
    app.logger.info("Forwarding request to %s\n%s" % (req.url, params))

    # stream the upstream response back to the client
    response = Response(stream_with_context(
        req.iter_content(chunk_size=1024)),
        status=req.status_code)
    response.headers['content-type'] = req.headers['content-type']
    if req.headers['content-type'] == 'application/pdf':
        filename = print_pdf_filename or (mapid + '.pdf')
        response.headers['content-disposition'] = content_disposition + \
            '; filename=' + filename
    return response
def put(self, dataset, id):
    """Update a dataset feature

    Update dataset feature with ID from a GeoJSON Feature and return it
    as a GeoJSON Feature, with configured internal audit fields stripped
    from the response.

    Multipart variant: the feature JSON is in the 'feature' form field,
    attachments are uploaded as "file:<field>" file parts.
    """
    args = feature_multipart_parser.parse_args()
    try:
        feature = json.loads(args['feature'])
    except (TypeError, ValueError):
        # missing field (TypeError) or invalid JSON (ValueError); both
        # fall through to the 400 below
        feature = None
    if not isinstance(feature, dict):
        api.abort(400, "feature is not an object")

    # tenant-specific config: optional audit fields stamped on edits and
    # attachment uploads
    config_handler = RuntimeConfig("data", app.logger)
    config = config_handler.tenant_config(tenant_handler.tenant())
    upload_user_field_suffix = config.get("upload_user_field_suffix", None)
    edit_user_field = config.get("edit_user_field", None)
    edit_timestamp_field = config.get("edit_timestamp_field", None)

    # Validate attachments
    attachments = attachments_service_handler()
    for key in request.files:
        filedata = request.files[key]
        attachment_valid, message = attachments.validate_attachment(
            dataset, filedata)
        if not attachment_valid:
            api.abort(
                404,
                "Attachment validation failed for " + key + ": " + message)

    # Save attachments
    saved_attachments = {}
    internal_fields = {}
    for key in request.files:
        filedata = request.files[key]
        slug = attachments.save_attachment(dataset, filedata)
        if not slug:
            # saving failed: roll back attachments saved so far
            for slug in saved_attachments.values():
                attachments.remove_attachment(dataset, slug)
            api.abort(404, "Failed to save attachment: " + key)
        else:
            saved_attachments[key] = slug
            # NOTE: remove the exact "file:" prefix; the previous
            # key.lstrip("file:") stripped a *character set* and mangled
            # field names beginning with 'f', 'i', 'l', 'e' or ':'
            field = key[len("file:"):] if key.startswith("file:") else key
            feature["properties"][field] = "attachment://" + slug
            if upload_user_field_suffix:
                # record which user uploaded this attachment
                upload_user_field = field + "__" + upload_user_field_suffix
                feature["properties"][upload_user_field] = get_auth_user()
                internal_fields[upload_user_field] = {
                    'name': upload_user_field,
                    'data_type': 'text'
                }

    data_service = data_service_handler()
    prev = data_service.show(get_auth_user(), dataset, id, None)
    if prev:
        prev_feature = prev["feature"]
        # If a non-empty attachment field value is changed, delete the attachment
        keys = list(feature["properties"].keys())
        for key in keys:
            if key in prev_feature["properties"] \
                    and prev_feature["properties"][key] \
                    and str(prev_feature["properties"][key]).startswith(
                        "attachment://") \
                    and feature["properties"][key] != \
                        prev_feature["properties"][key]:
                # NOTE: slice off the exact "attachment://" scheme prefix;
                # lstrip would also eat leading slug characters from the
                # set "athcmen:/"
                attachments.remove_attachment(
                    dataset,
                    prev_feature["properties"][key][len("attachment://"):])
                if upload_user_field_suffix:
                    # record which user changed this attachment field
                    upload_user_field = key + "__" + upload_user_field_suffix
                    feature["properties"][
                        upload_user_field] = get_auth_user()
                    internal_fields[upload_user_field] = {
                        'name': upload_user_field,
                        'data_type': 'text'
                    }

    # stamp configured edit audit fields
    if edit_user_field:
        feature["properties"][edit_user_field] = get_auth_user()
        internal_fields[edit_user_field] = {
            'name': edit_user_field,
            'data_type': 'text'
        }
    if edit_timestamp_field:
        feature["properties"][edit_timestamp_field] = str(datetime.now())
        internal_fields[edit_timestamp_field] = {
            'name': edit_timestamp_field,
            'data_type': 'text'
        }

    result = data_service.update(
        get_auth_user(), dataset, id, feature, internal_fields)
    if 'error' not in result:
        # remove internal audit fields before returning the feature
        result['feature']['properties'] = dict(
            filter(lambda x: x[0] not in internal_fields,
                   result['feature']['properties'].items()))
        return result['feature']
    else:
        # update failed: clean up attachments saved by this request
        for slug in saved_attachments.values():
            attachments.remove_attachment(dataset, slug)
        error_code = result.get('error_code') or 404
        error_details = result.get('error_details') or {}
        api.abort(error_code, result['error'], **error_details)
def get(self, program, path):
    """Proxy a GET request to the resolved service link and stream the
    upstream response back to the client."""
    target = self.__get_link(get_auth_user(), program, path, request.args)
    upstream = requests.get(target, stream=True, timeout=10)
    return self.__get_response(upstream)