def geojson(self, r, **attr):
    """
        Render the pivot table data as a dict ready to be exported as
        GeoJSON for display on a Map.

        @param r: the S3Request instance
        @param attr: controller attributes for the request
    """

    resource = self.resource
    response = current.response
    s3 = response.s3

    # Set response headers
    response.headers["Content-Type"] = s3.content_type.get("geojson",
                                                           "application/json")
    # Filter
    s3_filter = s3.filter
    if s3_filter is not None:
        resource.add_filter(s3_filter)

    if not resource.count():
        # No Data
        return json.dumps({})

    # Extract the relevant GET vars
    get_vars = r.get_vars
    layer_id = get_vars.get("layer", None)
    level = get_vars.get("level", "L0")

    # Fall back to report options defaults
    get_config = resource.get_config
    report_options = get_config("report_options", {})
    defaults = report_options.get("defaults", {})

    # The rows dimension
    context = get_config("context")
    if context and "location" in context:
        # @ToDo: We can add sanity-checking using resource.parse_bbox_query() as a guide if-desired
        rows = "(location)$%s" % level
    else:
        # Fallback to location_id
        rows = "location_id$%s" % level
        # Fallback we can add if-required
        #rows = "site_id$location_id$%s" % level

    # Filter out null values
    resource.add_filter(FS(rows) != None)

    # Set XSLT stylesheet
    stylesheet = os.path.join(r.folder, r.XSLT_PATH, "geojson", "export.xsl")

    # Do we have any data at this level of aggregation?
    fallback_to_points = True # @ToDo: deployment_setting?
    output = None
    if fallback_to_points:
        if resource.count() == 0:
            # Show Points
            resource.clear_query()
            # Apply URL filters (especially BBOX)
            resource.build_query(filter=s3_filter, vars=get_vars)

            # Extract the Location Data
            xmlformat = S3XMLFormat(stylesheet)
            include, exclude = xmlformat.get_fields(resource.tablename)
            resource.load(fields=include,
                          skip=exclude,
                          start=0,
                          limit=None,
                          orderby=None,
                          virtual=False,
                          cacheable=True)

            gis = current.gis
            attr_fields = []
            style = gis.get_style(layer_id=layer_id,
                                  aggregate=False)
            popup_format = style.popup_format
            if popup_format:
                if "T(" in popup_format:
                    # i18n
                    T = current.T
                    items = regex_translate.findall(popup_format)
                    for item in items:
                        titem = str(T(item[1:-1]))
                        popup_format = popup_format.replace("T(%s)" % item,
                                                            titem)
                    style.popup_format = popup_format
                # Extract the attr_fields
                parts = popup_format.split("{")
                # Skip the first part
                parts = parts[1:]
                for part in parts:
                    attribute = part.split("}")[0]
                    attr_fields.append(attribute)
                attr_fields = ",".join(attr_fields)

            location_data = gis.get_location_data(resource,
                                                  attr_fields=attr_fields)

            # Export as GeoJSON
            current.xml.show_ids = True
            output = resource.export_xml(fields=include,
                                         mcomponents=None,
                                         references=[],
                                         stylesheet=stylesheet,
                                         as_json=True,
                                         location_data=location_data,
                                         map_data=dict(style=style),
                                         )
            # Transformation error?
            if not output:
                r.error(400, "XSLT Transformation Error: %s" % current.xml.error)
    else:
        while resource.count() == 0:
            # Try a lower level of aggregation
            level = int(level[1:])
            if level == 0:
                # Nothing we can display
                return json.dumps({})
            resource.clear_query()
            # Apply URL filters (especially BBOX)
            resource.build_query(filter=s3_filter, vars=get_vars)
            level = "L%s" % (level - 1)
            if context and "location" in context:
                # @ToDo: We can add sanity-checking using resource.parse_bbox_query() as a guide if-desired
                rows = "(location)$%s" % level
            else:
                # Fallback to location_id
                rows = "location_id$%s" % level
                # Fallback we can add if-required
                #rows = "site_id$location_id$%s" % level
            resource.add_filter(FS(rows) != None)

    if not output:
        # Build the Pivot Table
        cols = None
        layer = get_vars.get("fact", defaults.get("fact", "count(id)"))
        m = layer_pattern.match(layer)
        selector, method = m.group(2), m.group(1)
        prefix = resource.prefix_selector
        selector = prefix(selector)
        layer = (selector, method)
        pivottable = resource.pivottable(rows, cols, [layer])

        # Extract the Location Data
        #attr_fields = []
        style = current.gis.get_style(layer_id=layer_id,
                                      aggregate=True)
        popup_format = style.popup_format
        if popup_format:
            if "T(" in popup_format:
                # i18n
                T = current.T
                items = regex_translate.findall(popup_format)
                for item in items:
                    titem = str(T(item[1:-1]))
                    popup_format = popup_format.replace("T(%s)" % item,
                                                        titem)
                style.popup_format = popup_format
            # Extract the attr_fields
            # No need as defaulted inside S3PivotTable.geojson()
            #parts = popup_format.split("{")
            ## Skip the first part
            #parts = parts[1:]
            #for part in parts:
            #    attribute = part.split("}")[0]
            #    attr_fields.append(attribute)
            #attr_fields = ",".join(attr_fields)

        ids, location_data = pivottable.geojson(layer=layer, level=level)

        # Export as GeoJSON
        current.xml.show_ids = True
        gresource = current.s3db.resource("gis_location", id=ids)
        output = gresource.export_xml(fields=[],
                                      mcomponents=None,
                                      references=[],
                                      stylesheet=stylesheet,
                                      as_json=True,
                                      location_data=location_data,
                                      # Tell the client that we are
                                      # displaying aggregated data and
                                      # the level it is aggregated at
                                      map_data=dict(level=int(level[1:]),
                                                    style=style),
                                      )
        # Transformation error?
        if not output:
            r.error(400, "XSLT Transformation Error: %s" % current.xml.error)

    return output
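# Illustration only: a minimal, self-contained sketch of how the "fact"
# GET var is split into (selector, method) in the pivot-table branch of
# geojson() above. The real layer_pattern is defined elsewhere in this
# module; the regex below is an assumption chosen to match how
# m.group(1) (method) and m.group(2) (selector) are consumed,
# e.g. "count(id)" -> selector "id", method "count".
import re

_layer_pattern_sketch = re.compile(r"([a-zA-Z]+)\((.*)\)\Z")

def _parse_fact_sketch(fact):
    """
        Split a fact expression like "sum(population)" into
        (selector, method), e.g. ("population", "sum").
        Hypothetical helper for illustration, not part of the API.
    """
    m = _layer_pattern_sketch.match(fact)
    if not m:
        raise ValueError("Unparseable fact expression: %s" % fact)
    # Same group order as used in geojson(): group(1)=method, group(2)=selector
    return m.group(2), m.group(1)

# Example:
#   _parse_fact_sketch("count(id)")        -> ("id", "count")
#   _parse_fact_sketch("sum(population)")  -> ("population", "sum")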
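# Illustration only: a sketch of how the point-export branch of geojson()
# above derives attr_fields from a style's popup_format, where attributes
# appear as {field} placeholders (the T(...) i18n markers are handled
# separately via regex_translate). Hypothetical helper, not part of the API.
def _attr_fields_sketch(popup_format):
    """
        Extract the attribute names referenced by {field} placeholders,
        mirroring the extraction loop in geojson() above.
    """
    attr_fields = []
    for part in popup_format.split("{")[1:]:
        # Everything up to the closing brace is the attribute name
        attr_fields.append(part.split("}")[0])
    return ",".join(attr_fields)

# Example:
#   _attr_fields_sketch("Name: {name}, Population: {population}")
#   -> "name,population"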